From 8d0759fbca8e34062371f01cc09844fdc6bba2a5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 7 Feb 2023 15:18:32 -0600 Subject: [PATCH 1/4] chore: Update gapic-generator-python to v1.8.4 (#456) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.4 PiperOrigin-RevId: 507808936 Source-Link: https://github.com/googleapis/googleapis/commit/64cf8492b21778ce62c66ecee81b468a293bfd4c Source-Link: https://github.com/googleapis/googleapis-gen/commit/53c48cac153d3b37f3d2c2dec4830cfd91ec4153 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTNjNDhjYWMxNTNkM2IzN2YzZDJjMmRlYzQ4MzBjZmQ5MWVjNDE1MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.documentai.v1.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta2.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta3.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 2cfe2fad..0df77e65 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index 7e9df76f..ef56bd7e 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 16239d0e..f02752f1 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.11.0" + "version": "0.1.0" }, "snippets": [ { From 7f7d34145f4c6eefa68c40bfcacc90eb88c3ae5b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:10:30 +0000 Subject: [PATCH 2/4] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#458) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f0f3b24b..894fb6bc 100644 --- a/.github/.OwlBot.lock.yaml +++ 
b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc4672..096e4800 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad 
\ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From 02d06d663a99062a98b6dc0b699ffdec38e2e5de Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 17:25:15 -0600 Subject: [PATCH 3/4] feat: enable "rest" transport in Python for services supporting numeric enums (#459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/documentai_v1/gapic_metadata.json | 95 + .../document_processor_service/client.py | 2 + .../transports/__init__.py | 7 + .../transports/rest.py | 3272 ++++++++ .../documentai_v1beta3/gapic_metadata.json | 115 + .../document_processor_service/client.py | 2 + .../transports/__init__.py | 7 + .../transports/rest.py | 3824 +++++++++ .../test_document_processor_service.py | 5671 +++++++++++++- .../test_document_processor_service.py | 6887 ++++++++++++++++- 10 files changed, 19688 insertions(+), 194 deletions(-) create mode 100644 google/cloud/documentai_v1/services/document_processor_service/transports/rest.py create mode 100644 google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py diff --git a/google/cloud/documentai_v1/gapic_metadata.json b/google/cloud/documentai_v1/gapic_metadata.json index 7bc1c8e7..2b34dcff 100644 --- a/google/cloud/documentai_v1/gapic_metadata.json +++ b/google/cloud/documentai_v1/gapic_metadata.json @@ -196,6 +196,101 @@ ] } } + }, + "rest": { + "libraryClient": "DocumentProcessorServiceClient", + "rpcs": { + "BatchProcessDocuments": { + "methods": [ + "batch_process_documents" + ] + }, + "CreateProcessor": { + "methods": [ + "create_processor" + ] + }, + "DeleteProcessor": { + "methods": [ + "delete_processor" + ] + }, + "DeleteProcessorVersion": { + "methods": [ + "delete_processor_version" + ] + }, + "DeployProcessorVersion": { + "methods": [ + 
"deploy_processor_version" + ] + }, + "DisableProcessor": { + "methods": [ + "disable_processor" + ] + }, + "EnableProcessor": { + "methods": [ + "enable_processor" + ] + }, + "FetchProcessorTypes": { + "methods": [ + "fetch_processor_types" + ] + }, + "GetProcessor": { + "methods": [ + "get_processor" + ] + }, + "GetProcessorType": { + "methods": [ + "get_processor_type" + ] + }, + "GetProcessorVersion": { + "methods": [ + "get_processor_version" + ] + }, + "ListProcessorTypes": { + "methods": [ + "list_processor_types" + ] + }, + "ListProcessorVersions": { + "methods": [ + "list_processor_versions" + ] + }, + "ListProcessors": { + "methods": [ + "list_processors" + ] + }, + "ProcessDocument": { + "methods": [ + "process_document" + ] + }, + "ReviewDocument": { + "methods": [ + "review_document" + ] + }, + "SetDefaultProcessorVersion": { + "methods": [ + "set_default_processor_version" + ] + }, + "UndeployProcessorVersion": { + "methods": [ + "undeploy_processor_version" + ] + } + } } } } diff --git a/google/cloud/documentai_v1/services/document_processor_service/client.py b/google/cloud/documentai_v1/services/document_processor_service/client.py index 255b9828..b541d7a8 100644 --- a/google/cloud/documentai_v1/services/document_processor_service/client.py +++ b/google/cloud/documentai_v1/services/document_processor_service/client.py @@ -67,6 +67,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DocumentProcessorServiceTransport from .transports.grpc import DocumentProcessorServiceGrpcTransport from .transports.grpc_asyncio import DocumentProcessorServiceGrpcAsyncIOTransport +from .transports.rest import DocumentProcessorServiceRestTransport class DocumentProcessorServiceClientMeta(type): @@ -82,6 +83,7 @@ class DocumentProcessorServiceClientMeta(type): ) # type: Dict[str, Type[DocumentProcessorServiceTransport]] _transport_registry["grpc"] = DocumentProcessorServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentProcessorServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentProcessorServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/documentai_v1/services/document_processor_service/transports/__init__.py b/google/cloud/documentai_v1/services/document_processor_service/transports/__init__.py index e4955876..f482e7c3 100644 --- a/google/cloud/documentai_v1/services/document_processor_service/transports/__init__.py +++ b/google/cloud/documentai_v1/services/document_processor_service/transports/__init__.py @@ -19,6 +19,10 @@ from .base import DocumentProcessorServiceTransport from .grpc import DocumentProcessorServiceGrpcTransport from .grpc_asyncio import DocumentProcessorServiceGrpcAsyncIOTransport +from .rest import ( + DocumentProcessorServiceRestInterceptor, + DocumentProcessorServiceRestTransport, +) # Compile a registry of transports. 
_transport_registry = ( @@ -26,9 +30,12 @@ ) # type: Dict[str, Type[DocumentProcessorServiceTransport]] _transport_registry["grpc"] = DocumentProcessorServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentProcessorServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentProcessorServiceRestTransport __all__ = ( "DocumentProcessorServiceTransport", "DocumentProcessorServiceGrpcTransport", "DocumentProcessorServiceGrpcAsyncIOTransport", + "DocumentProcessorServiceRestTransport", + "DocumentProcessorServiceRestInterceptor", ) diff --git a/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py b/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py new file mode 100644 index 00000000..f5569439 --- /dev/null +++ b/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py @@ -0,0 +1,3272 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.documentai_v1.types import document_processor_service +from google.cloud.documentai_v1.types import processor +from google.cloud.documentai_v1.types import processor as gcd_processor +from google.cloud.documentai_v1.types import processor_type + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentProcessorServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentProcessorServiceRestInterceptor: + """Interceptor for DocumentProcessorService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentProcessorServiceRestTransport. + + .. code-block:: python + class MyCustomDocumentProcessorServiceInterceptor(DocumentProcessorServiceRestInterceptor): + def pre_batch_process_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_process_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deploy_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deploy_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_processor_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_processor_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_processors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_processors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_processor_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_list_processor_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_processor_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_processor_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_process_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_process_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_review_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_review_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_default_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_default_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_undeploy_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_undeploy_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DocumentProcessorServiceRestTransport(interceptor=MyCustomDocumentProcessorServiceInterceptor()) + client = DocumentProcessorServiceClient(transport=transport) + + + """ + + def pre_batch_process_documents( + self, + request: document_processor_service.BatchProcessRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.BatchProcessRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_process_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_batch_process_documents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_process_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_create_processor( + self, + request: document_processor_service.CreateProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.CreateProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_create_processor( + self, response: gcd_processor.Processor + ) -> gcd_processor.Processor: + """Post-rpc interceptor for create_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_delete_processor( + self, + request: document_processor_service.DeleteProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeleteProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_delete_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_delete_processor_version( + self, + request: document_processor_service.DeleteProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeleteProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_delete_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_deploy_processor_version( + self, + request: document_processor_service.DeployProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeployProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for deploy_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_deploy_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for deploy_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_disable_processor( + self, + request: document_processor_service.DisableProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DisableProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for disable_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_disable_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_enable_processor( + self, + request: document_processor_service.EnableProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.EnableProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for enable_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_enable_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_fetch_processor_types( + self, + request: document_processor_service.FetchProcessorTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.FetchProcessorTypesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for fetch_processor_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_fetch_processor_types( + self, response: document_processor_service.FetchProcessorTypesResponse + ) -> document_processor_service.FetchProcessorTypesResponse: + """Post-rpc interceptor for fetch_processor_types + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor( + self, + request: document_processor_service.GetProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_processor(self, response: processor.Processor) -> processor.Processor: + """Post-rpc interceptor for get_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor_type( + self, + request: document_processor_service.GetProcessorTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorTypeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_processor_type( + self, response: processor_type.ProcessorType + ) -> processor_type.ProcessorType: + """Post-rpc interceptor for get_processor_type + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor_version( + self, + request: document_processor_service.GetProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorVersionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_get_processor_version( + self, response: processor.ProcessorVersion + ) -> processor.ProcessorVersion: + """Post-rpc interceptor for get_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_processors( + self, + request: document_processor_service.ListProcessorsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_processors + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processors( + self, response: document_processor_service.ListProcessorsResponse + ) -> document_processor_service.ListProcessorsResponse: + """Post-rpc interceptor for list_processors + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_processor_types( + self, + request: document_processor_service.ListProcessorTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorTypesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_processor_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processor_types( + self, response: document_processor_service.ListProcessorTypesResponse + ) -> document_processor_service.ListProcessorTypesResponse: + """Post-rpc interceptor for list_processor_types + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_processor_versions( + self, + request: document_processor_service.ListProcessorVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorVersionsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_processor_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processor_versions( + self, response: document_processor_service.ListProcessorVersionsResponse + ) -> document_processor_service.ListProcessorVersionsResponse: + """Post-rpc interceptor for list_processor_versions + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_process_document( + self, + request: document_processor_service.ProcessRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_processor_service.ProcessRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for process_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_process_document( + self, response: document_processor_service.ProcessResponse + ) -> document_processor_service.ProcessResponse: + """Post-rpc interceptor for process_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_review_document( + self, + request: document_processor_service.ReviewDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ReviewDocumentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for review_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_review_document( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for review_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_set_default_processor_version( + self, + request: document_processor_service.SetDefaultProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.SetDefaultProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_default_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_set_default_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for set_default_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_undeploy_processor_version( + self, + request: document_processor_service.UndeployProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.UndeployProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for undeploy_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_undeploy_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for undeploy_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DocumentProcessorServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentProcessorServiceRestInterceptor + + +class DocumentProcessorServiceRestTransport(DocumentProcessorServiceTransport): + """REST backend transport for DocumentProcessorService. + + Service to call Cloud DocumentAI to process documents + according to the processor's definition. Processors are built + using state-of-the-art Google AI such as natural language, + computer vision, and translation to extract structured + information from unstructured or semi-structured documents. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "documentai.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DocumentProcessorServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DocumentProcessorServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. 
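As a sketch of the constructor documented above, the transport can also be built explicitly — for example to target a regional endpoint or attach a custom interceptor — and then handed to the client; the hostname below is only an example of a regional endpoint:

    from google.cloud import documentai_v1 as documentai
    from google.cloud.documentai_v1.services.document_processor_service.transports import (
        DocumentProcessorServiceRestTransport,
    )

    # With no credentials passed, the base transport falls back to
    # application-default credentials.
    transport = DocumentProcessorServiceRestTransport(
        host="eu-documentai.googleapis.com",
    )
    client = documentai.DocumentProcessorServiceClient(transport=transport)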
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _BatchProcessDocuments(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("BatchProcessDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.BatchProcessRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch process documents method over HTTP. + + Args: + request (~.document_processor_service.BatchProcessRequest): + The request object. Request message for batch process + document method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
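Because batch processing returns a long-running operation, the wrapper client resolves it through the operations client built above and hands back a future that can be polled. A minimal sketch — every resource name and Cloud Storage path below is a placeholder:

    from google.cloud import documentai_v1 as documentai

    client = documentai.DocumentProcessorServiceClient(transport="rest")

    request = documentai.BatchProcessRequest(
        name="projects/my-project/locations/us/processors/my-processor",
        input_documents=documentai.BatchDocumentsInputConfig(
            gcs_prefix=documentai.GcsPrefix(gcs_uri_prefix="gs://my-bucket/input/"),
        ),
        document_output_config=documentai.DocumentOutputConfig(
            gcs_output_config=documentai.DocumentOutputConfig.GcsOutputConfig(
                gcs_uri="gs://my-bucket/output/",
            ),
        ),
    )

    # The REST stub returns an operations_pb2.Operation; the client wraps it in a
    # google.api_core.operation.Operation future, so polling looks the same as gRPC.
    operation = client.batch_process_documents(request=request)
    result = operation.result(timeout=600)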
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*}:batchProcess", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:batchProcess", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_process_documents( + request, metadata + ) + pb_request = document_processor_service.BatchProcessRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_process_documents(resp) + return resp + + class _CreateProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("CreateProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.CreateProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_processor.Processor: + r"""Call the create processor method over HTTP. + + Args: + request (~.document_processor_service.CreateProcessorRequest): + The request object. Request message for create a + processor. Notice this request is sent + to a regionalized backend service, and + if the processor type is not available + on that region, the creation will fail. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_processor.Processor: + The first-class citizen for Document + AI. Each processor defines how to + extract structural information from a + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/processors", + "body": "processor", + }, + ] + request, metadata = self._interceptor.pre_create_processor( + request, metadata + ) + pb_request = document_processor_service.CreateProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_processor.Processor() + pb_resp = gcd_processor.Processor.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_processor(resp) + return resp + + class _DeleteProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeleteProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeleteProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete processor method over HTTP. + + Args: + request (~.document_processor_service.DeleteProcessorRequest): + The request object. Request message for the delete + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/processors/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_processor( + request, metadata + ) + pb_request = document_processor_service.DeleteProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_processor(resp) + return resp + + class _DeleteProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeleteProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeleteProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete processor version method over HTTP. + + Args: + request (~.document_processor_service.DeleteProcessorVersionRequest): + The request object. Request message for the delete + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_processor_version( + request, metadata + ) + pb_request = document_processor_service.DeleteProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_processor_version(resp) + return resp + + class _DeployProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeployProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeployProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the deploy processor version method over HTTP. + + Args: + request (~.document_processor_service.DeployProcessorVersionRequest): + The request object. Request message for the deploy + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:deploy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_deploy_processor_version( + request, metadata + ) + pb_request = document_processor_service.DeployProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deploy_processor_version(resp) + return resp + + class _DisableProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DisableProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DisableProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable processor method over HTTP. + + Args: + request (~.document_processor_service.DisableProcessorRequest): + The request object. Request message for the disable + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*}:disable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_processor( + request, metadata + ) + pb_request = document_processor_service.DisableProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_processor(resp) + return resp + + class _EnableProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("EnableProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.EnableProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable processor method over HTTP. + + Args: + request (~.document_processor_service.EnableProcessorRequest): + The request object. Request message for the enable + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*}:enable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_processor( + request, metadata + ) + pb_request = document_processor_service.EnableProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_processor(resp) + return resp + + class _FetchProcessorTypes(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("FetchProcessorTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.FetchProcessorTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.FetchProcessorTypesResponse: + r"""Call the fetch processor types method over HTTP. + + Args: + request (~.document_processor_service.FetchProcessorTypesRequest): + The request object. Request message for fetch processor + types. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.FetchProcessorTypesResponse: + Response message for fetch processor + types. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}:fetchProcessorTypes", + }, + ] + request, metadata = self._interceptor.pre_fetch_processor_types( + request, metadata + ) + pb_request = document_processor_service.FetchProcessorTypesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.FetchProcessorTypesResponse() + pb_resp = document_processor_service.FetchProcessorTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_processor_types(resp) + return resp + + class _GetProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor.Processor: + r"""Call the get processor method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorRequest): + The request object. Request message for get processor. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor.Processor: + The first-class citizen for Document + AI. Each processor defines how to + extract structural information from a + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/processors/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor(request, metadata) + pb_request = document_processor_service.GetProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor.Processor() + pb_resp = processor.Processor.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor(resp) + return resp + + class _GetProcessorType(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessorType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor_type.ProcessorType: + r"""Call the get processor type method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorTypeRequest): + The request object. Request message for get processor. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor_type.ProcessorType: + A processor type is responsible for + performing a certain document + understanding task on a certain type of + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/processorTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor_type( + request, metadata + ) + pb_request = document_processor_service.GetProcessorTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor_type.ProcessorType() + pb_resp = processor_type.ProcessorType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor_type(resp) + return resp + + class _GetProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor.ProcessorVersion: + r"""Call the get processor version method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorVersionRequest): + The request object. Request message for get processor + version. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor.ProcessorVersion: + A processor version is an + implementation of a processor. Each + processor can have multiple versions, + pre-trained by Google internally or + up-trained by the customer. At a time, a + processor can only have one default + version version. 
So the processor's + behavior (when processing documents) is + defined by a default version. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor_version( + request, metadata + ) + pb_request = document_processor_service.GetProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor.ProcessorVersion() + pb_resp = processor.ProcessorVersion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor_version(resp) + return resp + + class _ListProcessors(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessors") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorsResponse: + r"""Call the list processors method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorsRequest): + The request object. Request message for list all + processors belonging to a project. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorsResponse: + Response message for list processors. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/processors", + }, + ] + request, metadata = self._interceptor.pre_list_processors(request, metadata) + pb_request = document_processor_service.ListProcessorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorsResponse() + pb_resp = document_processor_service.ListProcessorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processors(resp) + return resp + + class _ListProcessorTypes(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessorTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorTypesResponse: + r"""Call the list processor types method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorTypesRequest): + The request object. Request message for list processor + types. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorTypesResponse: + Response message for list processor + types. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/processorTypes", + }, + ] + request, metadata = self._interceptor.pre_list_processor_types( + request, metadata + ) + pb_request = document_processor_service.ListProcessorTypesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorTypesResponse() + pb_resp = document_processor_service.ListProcessorTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processor_types(resp) + return resp + + class _ListProcessorVersions(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessorVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorVersionsResponse: + r"""Call the list processor versions method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorVersionsRequest): + The request object. Request message for list all + processor versions belongs to a + processor. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorVersionsResponse: + Response message for list processors. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/processors/*}/processorVersions", + }, + ] + request, metadata = self._interceptor.pre_list_processor_versions( + request, metadata + ) + pb_request = document_processor_service.ListProcessorVersionsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorVersionsResponse() + pb_resp = document_processor_service.ListProcessorVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processor_versions(resp) + return resp + + class _ProcessDocument(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ProcessDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ProcessRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ProcessResponse: + r"""Call the process document method over HTTP. + + Args: + request (~.document_processor_service.ProcessRequest): + The request object. Request message for the process + document method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ProcessResponse: + Response message for the process + document method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*}:process", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:process", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_process_document( + request, metadata + ) + pb_request = document_processor_service.ProcessRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ProcessResponse() + pb_resp = document_processor_service.ProcessResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_process_document(resp) + return resp + + class _ReviewDocument(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ReviewDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ReviewDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the review document method over HTTP. + + Args: + request (~.document_processor_service.ReviewDocumentRequest): + The request object. Request message for review document + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{human_review_config=projects/*/locations/*/processors/*/humanReviewConfig}:reviewDocument", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_review_document(request, metadata) + pb_request = document_processor_service.ReviewDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_review_document(resp) + return resp + + class _SetDefaultProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("SetDefaultProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.SetDefaultProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the set default processor + version method over HTTP. + + Args: + request (~.document_processor_service.SetDefaultProcessorVersionRequest): + The request object. Request message for the set default + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{processor=projects/*/locations/*/processors/*}:setDefaultProcessorVersion", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_default_processor_version( + request, metadata + ) + pb_request = ( + document_processor_service.SetDefaultProcessorVersionRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_default_processor_version(resp) + return resp + + class _UndeployProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("UndeployProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.UndeployProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the undeploy processor + version method over HTTP. + + Args: + request (~.document_processor_service.UndeployProcessorVersionRequest): + The request object. Request message for the undeploy + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:undeploy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_undeploy_processor_version( + request, metadata + ) + pb_request = document_processor_service.UndeployProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_undeploy_processor_version(resp) + return resp + + @property + def batch_process_documents( + self, + ) -> Callable[ + [document_processor_service.BatchProcessRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchProcessDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_processor( + self, + ) -> Callable[ + [document_processor_service.CreateProcessorRequest], gcd_processor.Processor + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_processor( + self, + ) -> Callable[ + [document_processor_service.DeleteProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_processor_version( + self, + ) -> Callable[ + [document_processor_service.DeleteProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def deploy_processor_version( + self, + ) -> Callable[ + [document_processor_service.DeployProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeployProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_processor( + self, + ) -> Callable[ + [document_processor_service.DisableProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_processor( + self, + ) -> Callable[ + [document_processor_service.EnableProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_processor_types( + self, + ) -> Callable[ + [document_processor_service.FetchProcessorTypesRequest], + document_processor_service.FetchProcessorTypesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchProcessorTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor( + self, + ) -> Callable[ + [document_processor_service.GetProcessorRequest], processor.Processor + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor_type( + self, + ) -> Callable[ + [document_processor_service.GetProcessorTypeRequest], + processor_type.ProcessorType, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProcessorType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor_version( + self, + ) -> Callable[ + [document_processor_service.GetProcessorVersionRequest], + processor.ProcessorVersion, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processors( + self, + ) -> Callable[ + [document_processor_service.ListProcessorsRequest], + document_processor_service.ListProcessorsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListProcessors(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processor_types( + self, + ) -> Callable[ + [document_processor_service.ListProcessorTypesRequest], + document_processor_service.ListProcessorTypesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProcessorTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processor_versions( + self, + ) -> Callable[ + [document_processor_service.ListProcessorVersionsRequest], + document_processor_service.ListProcessorVersionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProcessorVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def process_document( + self, + ) -> Callable[ + [document_processor_service.ProcessRequest], + document_processor_service.ProcessResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProcessDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def review_document( + self, + ) -> Callable[ + [document_processor_service.ReviewDocumentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReviewDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_default_processor_version( + self, + ) -> Callable[ + [document_processor_service.SetDefaultProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetDefaultProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def undeploy_processor_version( + self, + ) -> Callable[ + [document_processor_service.UndeployProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UndeployProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DocumentProcessorServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentProcessorServiceRestTransport",) diff --git a/google/cloud/documentai_v1beta3/gapic_metadata.json b/google/cloud/documentai_v1beta3/gapic_metadata.json index f94368f8..95c46773 100644 --- a/google/cloud/documentai_v1beta3/gapic_metadata.json +++ b/google/cloud/documentai_v1beta3/gapic_metadata.json @@ -236,6 +236,121 @@ ] } } + }, + "rest": { + "libraryClient": "DocumentProcessorServiceClient", + "rpcs": { + "BatchProcessDocuments": { + "methods": [ + "batch_process_documents" + ] + }, + "CreateProcessor": { + "methods": [ + "create_processor" + ] + }, + "DeleteProcessor": { + "methods": [ + "delete_processor" + ] + }, + "DeleteProcessorVersion": { + "methods": [ + "delete_processor_version" + ] + }, + "DeployProcessorVersion": { + "methods": [ + "deploy_processor_version" + ] + }, + "DisableProcessor": { + "methods": [ + "disable_processor" + ] + }, + "EnableProcessor": { + "methods": [ + "enable_processor" + ] + }, + "EvaluateProcessorVersion": { + "methods": [ + "evaluate_processor_version" + ] + }, + "FetchProcessorTypes": { + "methods": [ + "fetch_processor_types" + ] + }, + "GetEvaluation": { + "methods": [ + "get_evaluation" + ] + }, + "GetProcessor": { + "methods": [ + "get_processor" + ] + }, + "GetProcessorType": { + "methods": [ + "get_processor_type" + ] + }, + "GetProcessorVersion": { + "methods": [ + "get_processor_version" + ] + }, + "ListEvaluations": { + "methods": [ + "list_evaluations" + ] + }, + "ListProcessorTypes": { + "methods": [ + "list_processor_types" + ] + }, + "ListProcessorVersions": { + "methods": [ + "list_processor_versions" + ] + }, + "ListProcessors": { + "methods": [ + "list_processors" + ] + }, + "ProcessDocument": { + "methods": [ + "process_document" + ] + }, + "ReviewDocument": { + "methods": [ + "review_document" + ] + }, + "SetDefaultProcessorVersion": { + "methods": [ + "set_default_processor_version" + ] + }, + "TrainProcessorVersion": { + "methods": [ + "train_processor_version" + ] + }, + "UndeployProcessorVersion": { + "methods": [ + "undeploy_processor_version" + ] + } + } } } } diff --git a/google/cloud/documentai_v1beta3/services/document_processor_service/client.py b/google/cloud/documentai_v1beta3/services/document_processor_service/client.py index b07d558f..21585c91 100644 --- a/google/cloud/documentai_v1beta3/services/document_processor_service/client.py +++ b/google/cloud/documentai_v1beta3/services/document_processor_service/client.py @@ -68,6 +68,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DocumentProcessorServiceTransport from .transports.grpc import DocumentProcessorServiceGrpcTransport from .transports.grpc_asyncio import DocumentProcessorServiceGrpcAsyncIOTransport +from .transports.rest import DocumentProcessorServiceRestTransport class DocumentProcessorServiceClientMeta(type): @@ -83,6 +84,7 @@ class DocumentProcessorServiceClientMeta(type): ) # type: Dict[str, Type[DocumentProcessorServiceTransport]] _transport_registry["grpc"] = DocumentProcessorServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentProcessorServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DocumentProcessorServiceRestTransport def 
get_transport_class( cls, diff --git a/google/cloud/documentai_v1beta3/services/document_processor_service/transports/__init__.py b/google/cloud/documentai_v1beta3/services/document_processor_service/transports/__init__.py index e4955876..f482e7c3 100644 --- a/google/cloud/documentai_v1beta3/services/document_processor_service/transports/__init__.py +++ b/google/cloud/documentai_v1beta3/services/document_processor_service/transports/__init__.py @@ -19,6 +19,10 @@ from .base import DocumentProcessorServiceTransport from .grpc import DocumentProcessorServiceGrpcTransport from .grpc_asyncio import DocumentProcessorServiceGrpcAsyncIOTransport +from .rest import ( + DocumentProcessorServiceRestInterceptor, + DocumentProcessorServiceRestTransport, +) # Compile a registry of transports. _transport_registry = ( @@ -26,9 +30,12 @@ ) # type: Dict[str, Type[DocumentProcessorServiceTransport]] _transport_registry["grpc"] = DocumentProcessorServiceGrpcTransport _transport_registry["grpc_asyncio"] = DocumentProcessorServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DocumentProcessorServiceRestTransport __all__ = ( "DocumentProcessorServiceTransport", "DocumentProcessorServiceGrpcTransport", "DocumentProcessorServiceGrpcAsyncIOTransport", + "DocumentProcessorServiceRestTransport", + "DocumentProcessorServiceRestInterceptor", ) diff --git a/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py b/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py new file mode 100644 index 00000000..2ec9b13c --- /dev/null +++ b/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py @@ -0,0 +1,3824 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
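+#
+# This generated module adds the REST transport (JSON over HTTP/1.1 via
+# google.auth's AuthorizedSession) for DocumentProcessorService; it is
+# registered in the transport registry under the name "rest" alongside the
+# gRPC and gRPC-asyncio transports.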
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.documentai_v1beta3.types import document_processor_service, evaluation +from google.cloud.documentai_v1beta3.types import processor +from google.cloud.documentai_v1beta3.types import processor as gcd_processor +from google.cloud.documentai_v1beta3.types import processor_type + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DocumentProcessorServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DocumentProcessorServiceRestInterceptor: + """Interceptor for DocumentProcessorService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DocumentProcessorServiceRestTransport. + + .. 
code-block:: python + class MyCustomDocumentProcessorServiceInterceptor(DocumentProcessorServiceRestInterceptor): + def pre_batch_process_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_process_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deploy_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deploy_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_disable_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_evaluate_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_evaluate_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_processor_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_processor_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_evaluation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_evaluation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_evaluations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_evaluations(self, response): + logging.log(f"Received response: {response}") + 
return response + + def pre_list_processors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_processors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_processor_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_processor_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_processor_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_processor_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_process_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_process_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_review_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_review_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_default_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_default_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_train_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_undeploy_processor_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_undeploy_processor_version(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DocumentProcessorServiceRestTransport(interceptor=MyCustomDocumentProcessorServiceInterceptor()) + client = DocumentProcessorServiceClient(transport=transport) + + + """ + + def pre_batch_process_documents( + self, + request: document_processor_service.BatchProcessRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.BatchProcessRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_process_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_batch_process_documents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_process_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_create_processor( + self, + request: document_processor_service.CreateProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.CreateProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_create_processor( + self, response: gcd_processor.Processor + ) -> gcd_processor.Processor: + """Post-rpc interceptor for create_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_delete_processor( + self, + request: document_processor_service.DeleteProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeleteProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_delete_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_delete_processor_version( + self, + request: document_processor_service.DeleteProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeleteProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_delete_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_deploy_processor_version( + self, + request: document_processor_service.DeployProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DeployProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for deploy_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_deploy_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for deploy_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_disable_processor( + self, + request: document_processor_service.DisableProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.DisableProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for disable_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_disable_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_enable_processor( + self, + request: document_processor_service.EnableProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.EnableProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for enable_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_enable_processor( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_evaluate_processor_version( + self, + request: document_processor_service.EvaluateProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.EvaluateProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for evaluate_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_evaluate_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for evaluate_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_fetch_processor_types( + self, + request: document_processor_service.FetchProcessorTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.FetchProcessorTypesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for fetch_processor_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_fetch_processor_types( + self, response: document_processor_service.FetchProcessorTypesResponse + ) -> document_processor_service.FetchProcessorTypesResponse: + """Post-rpc interceptor for fetch_processor_types + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_evaluation( + self, + request: document_processor_service.GetEvaluationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetEvaluationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_evaluation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. 
+ """ + return request, metadata + + def post_get_evaluation( + self, response: evaluation.Evaluation + ) -> evaluation.Evaluation: + """Post-rpc interceptor for get_evaluation + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor( + self, + request: document_processor_service.GetProcessorRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_processor(self, response: processor.Processor) -> processor.Processor: + """Post-rpc interceptor for get_processor + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor_type( + self, + request: document_processor_service.GetProcessorTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorTypeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_processor_type( + self, response: processor_type.ProcessorType + ) -> processor_type.ProcessorType: + """Post-rpc interceptor for get_processor_type + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_processor_version( + self, + request: document_processor_service.GetProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.GetProcessorVersionRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_processor_version( + self, response: processor.ProcessorVersion + ) -> processor.ProcessorVersion: + """Post-rpc interceptor for get_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_evaluations( + self, + request: document_processor_service.ListEvaluationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListEvaluationsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_evaluations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_evaluations( + self, response: document_processor_service.ListEvaluationsResponse + ) -> document_processor_service.ListEvaluationsResponse: + """Post-rpc interceptor for list_evaluations + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_processors( + self, + request: document_processor_service.ListProcessorsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_processors + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processors( + self, response: document_processor_service.ListProcessorsResponse + ) -> document_processor_service.ListProcessorsResponse: + """Post-rpc interceptor for list_processors + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_processor_types( + self, + request: document_processor_service.ListProcessorTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorTypesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_processor_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processor_types( + self, response: document_processor_service.ListProcessorTypesResponse + ) -> document_processor_service.ListProcessorTypesResponse: + """Post-rpc interceptor for list_processor_types + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_processor_versions( + self, + request: document_processor_service.ListProcessorVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ListProcessorVersionsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_processor_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_processor_versions( + self, response: document_processor_service.ListProcessorVersionsResponse + ) -> document_processor_service.ListProcessorVersionsResponse: + """Post-rpc interceptor for list_processor_versions + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_process_document( + self, + request: document_processor_service.ProcessRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_processor_service.ProcessRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for process_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_process_document( + self, response: document_processor_service.ProcessResponse + ) -> document_processor_service.ProcessResponse: + """Post-rpc interceptor for process_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. 
+ """ + return response + + def pre_review_document( + self, + request: document_processor_service.ReviewDocumentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.ReviewDocumentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for review_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_review_document( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for review_document + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_set_default_processor_version( + self, + request: document_processor_service.SetDefaultProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.SetDefaultProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_default_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_set_default_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for set_default_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_train_processor_version( + self, + request: document_processor_service.TrainProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.TrainProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for train_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_train_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_undeploy_processor_version( + self, + request: document_processor_service.UndeployProcessorVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_processor_service.UndeployProcessorVersionRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for undeploy_processor_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_undeploy_processor_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for undeploy_processor_version + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentProcessorService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DocumentProcessorService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class DocumentProcessorServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DocumentProcessorServiceRestInterceptor + + +class DocumentProcessorServiceRestTransport(DocumentProcessorServiceTransport): + """REST backend transport for DocumentProcessorService. + + Service to call Cloud DocumentAI to process documents + according to the processor's definition. Processors are built + using state-of-the-art Google AI such as natural language, + computer vision, and translation to extract structured + information from unstructured or semi-structured documents. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "documentai.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DocumentProcessorServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
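+ # NOTE: The remainder of the constructor normalizes ``host`` into a full
+ # URL, creates the AuthorizedSession used for HTTP/1.1 requests, optionally
+ # configures an mTLS channel, and installs the request/response interceptor
+ # (falling back to the default DocumentProcessorServiceRestInterceptor).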
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DocumentProcessorServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/operations}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _BatchProcessDocuments(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("BatchProcessDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.BatchProcessRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch process documents method over HTTP. + + Args: + request (~.document_processor_service.BatchProcessRequest): + The request object. Request message for batch process + document method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}:batchProcess", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:batchProcess", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_process_documents( + request, metadata + ) + pb_request = document_processor_service.BatchProcessRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_process_documents(resp) + return resp + + class _CreateProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("CreateProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.CreateProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_processor.Processor: + r"""Call the create processor method over HTTP. + + Args: + request (~.document_processor_service.CreateProcessorRequest): + The request object. Request message for create a + processor. Notice this request is sent + to a regionalized backend service, and + if the processor type is not available + on that region, the creation will fail. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_processor.Processor: + The first-class citizen for Document + AI. Each processor defines how to + extract structural information from a + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{parent=projects/*/locations/*}/processors", + "body": "processor", + }, + ] + request, metadata = self._interceptor.pre_create_processor( + request, metadata + ) + pb_request = document_processor_service.CreateProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_processor.Processor() + pb_resp = gcd_processor.Processor.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_processor(resp) + return resp + + class _DeleteProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeleteProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeleteProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete processor method over HTTP. + + Args: + request (~.document_processor_service.DeleteProcessorRequest): + The request object. Request message for the delete + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_processor( + request, metadata + ) + pb_request = document_processor_service.DeleteProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_processor(resp) + return resp + + class _DeleteProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeleteProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeleteProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete processor version method over HTTP. + + Args: + request (~.document_processor_service.DeleteProcessorVersionRequest): + The request object. Request message for the delete + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_processor_version( + request, metadata + ) + pb_request = document_processor_service.DeleteProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_processor_version(resp) + return resp + + class _DeployProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DeployProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DeployProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the deploy processor version method over HTTP. + + Args: + request (~.document_processor_service.DeployProcessorVersionRequest): + The request object. Request message for the deploy + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:deploy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_deploy_processor_version( + request, metadata + ) + pb_request = document_processor_service.DeployProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deploy_processor_version(resp) + return resp + + class _DisableProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("DisableProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.DisableProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable processor method over HTTP. + + Args: + request (~.document_processor_service.DisableProcessorRequest): + The request object. Request message for the disable + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}:disable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_disable_processor( + request, metadata + ) + pb_request = document_processor_service.DisableProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_processor(resp) + return resp + + class _EnableProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("EnableProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.EnableProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable processor method over HTTP. + + Args: + request (~.document_processor_service.EnableProcessorRequest): + The request object. Request message for the enable + processor method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}:enable", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_processor( + request, metadata + ) + pb_request = document_processor_service.EnableProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_processor(resp) + return resp + + class _EvaluateProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("EvaluateProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.EvaluateProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the evaluate processor + version method over HTTP. + + Args: + request (~.document_processor_service.EvaluateProcessorVersionRequest): + The request object. Evaluates the given ProcessorVersion + against the supplied documents. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{processor_version=projects/*/locations/*/processors/*/processorVersions/*}:evaluateProcessorVersion", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_evaluate_processor_version( + request, metadata + ) + pb_request = document_processor_service.EvaluateProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_evaluate_processor_version(resp) + return resp + + class _FetchProcessorTypes(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("FetchProcessorTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.FetchProcessorTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.FetchProcessorTypesResponse: + r"""Call the fetch processor types method over HTTP. + + Args: + request (~.document_processor_service.FetchProcessorTypesRequest): + The request object. Request message for fetch processor + types. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.FetchProcessorTypesResponse: + Response message for fetch processor + types. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{parent=projects/*/locations/*}:fetchProcessorTypes", + }, + ] + request, metadata = self._interceptor.pre_fetch_processor_types( + request, metadata + ) + pb_request = document_processor_service.FetchProcessorTypesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.FetchProcessorTypesResponse() + pb_resp = document_processor_service.FetchProcessorTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_processor_types(resp) + return resp + + class _GetEvaluation(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetEvaluation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetEvaluationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation.Evaluation: + r"""Call the get evaluation method over HTTP. + + Args: + request (~.document_processor_service.GetEvaluationRequest): + The request object. Retrieves a specific Evaluation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation.Evaluation: + An evaluation of a ProcessorVersion's + performance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*/evaluations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_evaluation(request, metadata) + pb_request = document_processor_service.GetEvaluationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = evaluation.Evaluation() + pb_resp = evaluation.Evaluation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_evaluation(resp) + return resp + + class _GetProcessor(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessor") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor.Processor: + r"""Call the get processor method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorRequest): + The request object. Request message for get processor. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor.Processor: + The first-class citizen for Document + AI. Each processor defines how to + extract structural information from a + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor(request, metadata) + pb_request = document_processor_service.GetProcessorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor.Processor() + pb_resp = processor.Processor.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor(resp) + return resp + + class _GetProcessorType(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessorType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor_type.ProcessorType: + r"""Call the get processor type method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorTypeRequest): + The request object. Request message for get processor. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor_type.ProcessorType: + A processor type is responsible for + performing a certain document + understanding task on a certain type of + document. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/processorTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor_type( + request, metadata + ) + pb_request = document_processor_service.GetProcessorTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor_type.ProcessorType() + pb_resp = processor_type.ProcessorType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor_type(resp) + return resp + + class _GetProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("GetProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.GetProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> processor.ProcessorVersion: + r"""Call the get processor version method over HTTP. + + Args: + request (~.document_processor_service.GetProcessorVersionRequest): + The request object. Request message for get processor + version. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.processor.ProcessorVersion: + A processor version is an + implementation of a processor. Each + processor can have multiple versions, + pre-trained by Google internally or + up-trained by the customer. At a time, a + processor can only have one default + version version. 
So the processor's + behavior (when processing documents) is + defined by a default version + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_processor_version( + request, metadata + ) + pb_request = document_processor_service.GetProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = processor.ProcessorVersion() + pb_resp = processor.ProcessorVersion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_processor_version(resp) + return resp + + class _ListEvaluations(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListEvaluations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListEvaluationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListEvaluationsResponse: + r"""Call the list evaluations method over HTTP. + + Args: + request (~.document_processor_service.ListEvaluationsRequest): + The request object. Retrieves a list of evaluations for a + given ProcessorVersion. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListEvaluationsResponse: + The response from ListEvaluations. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{parent=projects/*/locations/*/processors/*/processorVersions/*}/evaluations", + }, + ] + request, metadata = self._interceptor.pre_list_evaluations( + request, metadata + ) + pb_request = document_processor_service.ListEvaluationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListEvaluationsResponse() + pb_resp = document_processor_service.ListEvaluationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_evaluations(resp) + return resp + + class _ListProcessors(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessors") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorsResponse: + r"""Call the list processors method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorsRequest): + The request object. Request message for list all + processors belongs to a project. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorsResponse: + Response message for list processors. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{parent=projects/*/locations/*}/processors", + }, + ] + request, metadata = self._interceptor.pre_list_processors(request, metadata) + pb_request = document_processor_service.ListProcessorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorsResponse() + pb_resp = document_processor_service.ListProcessorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processors(resp) + return resp + + class _ListProcessorTypes(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessorTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorTypesResponse: + r"""Call the list processor types method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorTypesRequest): + The request object. Request message for list processor + types. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorTypesResponse: + Response message for list processor + types. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{parent=projects/*/locations/*}/processorTypes", + }, + ] + request, metadata = self._interceptor.pre_list_processor_types( + request, metadata + ) + pb_request = document_processor_service.ListProcessorTypesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorTypesResponse() + pb_resp = document_processor_service.ListProcessorTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processor_types(resp) + return resp + + class _ListProcessorVersions(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ListProcessorVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ListProcessorVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ListProcessorVersionsResponse: + r"""Call the list processor versions method over HTTP. + + Args: + request (~.document_processor_service.ListProcessorVersionsRequest): + The request object. Request message for list all + processor versions belongs to a + processor. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ListProcessorVersionsResponse: + Response message for list processors. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{parent=projects/*/locations/*/processors/*}/processorVersions", + }, + ] + request, metadata = self._interceptor.pre_list_processor_versions( + request, metadata + ) + pb_request = document_processor_service.ListProcessorVersionsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ListProcessorVersionsResponse() + pb_resp = document_processor_service.ListProcessorVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_processor_versions(resp) + return resp + + class _ProcessDocument(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ProcessDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ProcessRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_processor_service.ProcessResponse: + r"""Call the process document method over HTTP. + + Args: + request (~.document_processor_service.ProcessRequest): + The request object. Request message for the process + document method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_processor_service.ProcessResponse: + Response message for the process + document method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*}:process", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:process", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_process_document( + request, metadata + ) + pb_request = document_processor_service.ProcessRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_processor_service.ProcessResponse() + pb_resp = document_processor_service.ProcessResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_process_document(resp) + return resp + + class _ReviewDocument(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("ReviewDocument") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.ReviewDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the review document method over HTTP. + + Args: + request (~.document_processor_service.ReviewDocumentRequest): + The request object. Request message for review document + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{human_review_config=projects/*/locations/*/processors/*/humanReviewConfig}:reviewDocument", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_review_document(request, metadata) + pb_request = document_processor_service.ReviewDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_review_document(resp) + return resp + + class _SetDefaultProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("SetDefaultProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.SetDefaultProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the set default processor + version method over HTTP. + + Args: + request (~.document_processor_service.SetDefaultProcessorVersionRequest): + The request object. Request message for the set default + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{processor=projects/*/locations/*/processors/*}:setDefaultProcessorVersion", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_default_processor_version( + request, metadata + ) + pb_request = ( + document_processor_service.SetDefaultProcessorVersionRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_default_processor_version(resp) + return resp + + class _TrainProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("TrainProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.TrainProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train processor version method over HTTP. + + Args: + request (~.document_processor_service.TrainProcessorVersionRequest): + The request object. Request message for the create + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{parent=projects/*/locations/*/processors/*}/processorVersions:train", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_processor_version( + request, metadata + ) + pb_request = document_processor_service.TrainProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_processor_version(resp) + return resp + + class _UndeployProcessorVersion(DocumentProcessorServiceRestStub): + def __hash__(self): + return hash("UndeployProcessorVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_processor_service.UndeployProcessorVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the undeploy processor + version method over HTTP. + + Args: + request (~.document_processor_service.UndeployProcessorVersionRequest): + The request object. Request message for the undeploy + processor version method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:undeploy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_undeploy_processor_version( + request, metadata + ) + pb_request = document_processor_service.UndeployProcessorVersionRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_undeploy_processor_version(resp) + return resp + + @property + def batch_process_documents( + self, + ) -> Callable[ + [document_processor_service.BatchProcessRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchProcessDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_processor( + self, + ) -> Callable[ + [document_processor_service.CreateProcessorRequest], gcd_processor.Processor + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_processor( + self, + ) -> Callable[ + [document_processor_service.DeleteProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_processor_version( + self, + ) -> Callable[ + [document_processor_service.DeleteProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def deploy_processor_version( + self, + ) -> Callable[ + [document_processor_service.DeployProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeployProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def disable_processor( + self, + ) -> Callable[ + [document_processor_service.DisableProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DisableProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_processor( + self, + ) -> Callable[ + [document_processor_service.EnableProcessorRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def evaluate_processor_version( + self, + ) -> Callable[ + [document_processor_service.EvaluateProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EvaluateProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_processor_types( + self, + ) -> Callable[ + [document_processor_service.FetchProcessorTypesRequest], + document_processor_service.FetchProcessorTypesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchProcessorTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_evaluation( + self, + ) -> Callable[ + [document_processor_service.GetEvaluationRequest], evaluation.Evaluation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEvaluation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor( + self, + ) -> Callable[ + [document_processor_service.GetProcessorRequest], processor.Processor + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProcessor(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor_type( + self, + ) -> Callable[ + [document_processor_service.GetProcessorTypeRequest], + processor_type.ProcessorType, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetProcessorType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_processor_version( + self, + ) -> Callable[ + [document_processor_service.GetProcessorVersionRequest], + processor.ProcessorVersion, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_evaluations( + self, + ) -> Callable[ + [document_processor_service.ListEvaluationsRequest], + document_processor_service.ListEvaluationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEvaluations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processors( + self, + ) -> Callable[ + [document_processor_service.ListProcessorsRequest], + document_processor_service.ListProcessorsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProcessors(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processor_types( + self, + ) -> Callable[ + [document_processor_service.ListProcessorTypesRequest], + document_processor_service.ListProcessorTypesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProcessorTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_processor_versions( + self, + ) -> Callable[ + [document_processor_service.ListProcessorVersionsRequest], + document_processor_service.ListProcessorVersionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProcessorVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def process_document( + self, + ) -> Callable[ + [document_processor_service.ProcessRequest], + document_processor_service.ProcessResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProcessDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def review_document( + self, + ) -> Callable[ + [document_processor_service.ReviewDocumentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReviewDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_default_processor_version( + self, + ) -> Callable[ + [document_processor_service.SetDefaultProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetDefaultProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def train_processor_version( + self, + ) -> Callable[ + [document_processor_service.TrainProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TrainProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def undeploy_processor_version( + self, + ) -> Callable[ + [document_processor_service.UndeployProcessorVersionRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UndeployProcessorVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
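+            # core_exceptions.from_http_response inspects the status code and error
+            # payload and returns the matching subclass (e.g. NotFound for a 404,
+            # PermissionDenied for a 403), so callers never see a bare HTTP error.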
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DocumentProcessorServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*}/locations", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DocumentProcessorServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta3/{name=projects/*/locations/*/operations}", + }, + { + "method": "get", + "uri": "/uiv1beta3/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
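
For context, the _GetOperation, _ListOperations, and _CancelOperation stubs defined here back the corresponding mixin methods on the generated client; callers never touch the stubs directly. A rough usage sketch, assuming application default credentials are available (the operation name is a placeholder):

    from google.cloud import documentai_v1beta3
    from google.longrunning import operations_pb2

    client = documentai_v1beta3.DocumentProcessorServiceClient(transport="rest")
    # Served by the _GetOperation stub above via GET on the /v1beta3 URI.
    op = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="projects/my-project/locations/us/operations/123"
        )
    )
    print(op.done)
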
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DocumentProcessorServiceRestTransport",) diff --git a/tests/unit/gapic/documentai_v1/test_document_processor_service.py b/tests/unit/gapic/documentai_v1/test_document_processor_service.py index 1149b355..3c1469a4 100644 --- a/tests/unit/gapic/documentai_v1/test_document_processor_service.py +++ b/tests/unit/gapic/documentai_v1/test_document_processor_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api import launch_stage_pb2 # type: ignore @@ -47,6 +49,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -60,6 +63,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.documentai_v1.services.document_processor_service import ( DocumentProcessorServiceAsyncClient, @@ -130,6 +135,7 @@ def test__get_default_mtls_endpoint(): [ (DocumentProcessorServiceClient, "grpc"), (DocumentProcessorServiceAsyncClient, "grpc_asyncio"), + (DocumentProcessorServiceClient, "rest"), ], ) def test_document_processor_service_client_from_service_account_info( @@ -145,7 +151,11 @@ def test_document_processor_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) @pytest.mark.parametrize( @@ -153,6 +163,7 @@ def test_document_processor_service_client_from_service_account_info( [ (transports.DocumentProcessorServiceGrpcTransport, "grpc"), (transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentProcessorServiceRestTransport, "rest"), ], ) def test_document_processor_service_client_service_account_always_use_jwt( @@ -178,6 +189,7 @@ def test_document_processor_service_client_service_account_always_use_jwt( [ (DocumentProcessorServiceClient, "grpc"), (DocumentProcessorServiceAsyncClient, "grpc_asyncio"), + (DocumentProcessorServiceClient, "rest"), ], ) def test_document_processor_service_client_from_service_account_file( @@ -200,13 +212,18 @@ def test_document_processor_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) def test_document_processor_service_client_get_transport_class(): 
transport = DocumentProcessorServiceClient.get_transport_class() available_transports = [ transports.DocumentProcessorServiceGrpcTransport, + transports.DocumentProcessorServiceRestTransport, ] assert transport in available_transports @@ -227,6 +244,11 @@ def test_document_processor_service_client_get_transport_class(): transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -386,6 +408,18 @@ def test_document_processor_service_client_client_options( "grpc_asyncio", "false", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + "true", + ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -592,6 +626,11 @@ def test_document_processor_service_client_get_mtls_endpoint_and_cert_source( transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + ), ], ) def test_document_processor_service_client_client_options_scopes( @@ -632,6 +671,12 @@ def test_document_processor_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + None, + ), ], ) def test_document_processor_service_client_client_options_credentials_file( @@ -5446,145 +5491,5179 @@ async def test_review_document_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ProcessRequest, + dict, + ], +) +def test_process_document_rest(request_type): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ProcessResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.process_document(request) + + # Establish that the response is the type that we expect. 
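
The new test above drives process_document through a mocked HTTP session; outside of tests the same call goes over REST once the client is built with transport="rest". A minimal usage sketch, assuming application default credentials (the processor name and document bytes are placeholders):

    from google.cloud import documentai_v1

    client = documentai_v1.DocumentProcessorServiceClient(transport="rest")
    response = client.process_document(
        request=documentai_v1.ProcessRequest(
            name="projects/my-project/locations/us/processors/my-processor",
            raw_document=documentai_v1.RawDocument(
                content=b"...pdf bytes...", mime_type="application/pdf"
            ),
        )
    )
    print(response.document.text)
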
+ assert isinstance(response, document_processor_service.ProcessResponse) + + +def test_process_document_rest_required_fields( + request_type=document_processor_service.ProcessRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).process_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).process_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ProcessResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.process_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_process_document_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DocumentProcessorServiceGrpcTransport( + unset_fields = transport.process_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_process_document_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options=options, - transport=transport, + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_process_document" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_process_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ProcessRequest.pb( + document_processor_service.ProcessRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_processor_service.ProcessResponse.to_json( + document_processor_service.ProcessResponse() ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + request = document_processor_service.ProcessRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ProcessResponse() + + client.process_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide scopes and a transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_process_document_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.ProcessRequest +): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
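
The bad-request test that begins above relies on google.api_core translating any non-2xx requests.Response into the matching exception class. A small standalone sketch of that mapping (the Response object is built by hand purely for illustration):

    from google.api_core import exceptions as core_exceptions
    from requests import Response

    fake = Response()
    fake.status_code = 400
    # from_http_response picks the exception subclass from the status code,
    # so a 400 surfaces as core_exceptions.BadRequest.
    exc = core_exceptions.from_http_response(fake)
    assert isinstance(exc, core_exceptions.BadRequest)
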
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.process_document(request) + + +def test_process_document_rest_flattened(): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DocumentProcessorServiceClient(transport=transport) - assert client.transport is transport + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ProcessResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.process_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*}:process" + % client.transport._host, + args[1], + ) + + +def test_process_document_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.DocumentProcessorServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.process_document( + document_processor_service.ProcessRequest(), + name="name_value", + ) + + +def test_process_document_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - channel = transport.grpc_channel - assert channel @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DocumentProcessorServiceGrpcTransport, - transports.DocumentProcessorServiceGrpcAsyncIOTransport, + document_processor_service.BatchProcessRequest, + dict, ], ) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_batch_process_documents_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = DocumentProcessorServiceClient.get_transport_class(transport_name)( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_process_documents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_process_documents_rest_required_fields( + request_type=document_processor_service.BatchProcessRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_process_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_process_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_process_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_process_documents_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - assert transport.kind == transport_name + unset_fields = transport.batch_process_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_process_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_batch_process_documents", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_batch_process_documents", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.BatchProcessRequest.pb( + document_processor_service.BatchProcessRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.BatchProcessRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.batch_process_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_process_documents_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.BatchProcessRequest +): client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert isinstance( - client.transport, - transports.DocumentProcessorServiceGrpcTransport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
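
The interceptor test above wires the pre_/post_ hooks up with mocks; in application code the same hooks come from subclassing the REST interceptor and handing it to the transport. A rough sketch, assuming application default credentials (the logging bodies are illustrative):

    from google.cloud import documentai_v1
    from google.cloud.documentai_v1.services.document_processor_service.transports import (
        DocumentProcessorServiceRestInterceptor,
        DocumentProcessorServiceRestTransport,
    )

    class LoggingInterceptor(DocumentProcessorServiceRestInterceptor):
        def pre_batch_process_documents(self, request, metadata):
            print("sending BatchProcessRequest")
            return request, metadata

        def post_batch_process_documents(self, response):
            print("received long-running operation")
            return response

    transport = DocumentProcessorServiceRestTransport(
        interceptor=LoggingInterceptor()
    )
    client = documentai_v1.DocumentProcessorServiceClient(transport=transport)
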
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_process_documents(request) + + +def test_batch_process_documents_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -def test_document_processor_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DocumentProcessorServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_document_processor_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.documentai_v1.services.document_processor_service.transports.DocumentProcessorServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DocumentProcessorServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.batch_process_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*}:batchProcess" + % client.transport._host, + args[1], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "process_document", - "batch_process_documents", + +def test_batch_process_documents_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_process_documents( + document_processor_service.BatchProcessRequest(), + name="name_value", + ) + + +def test_batch_process_documents_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.FetchProcessorTypesRequest, + dict, + ], +) +def test_fetch_processor_types_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.FetchProcessorTypesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_processor_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document_processor_service.FetchProcessorTypesResponse) + + +def test_fetch_processor_types_rest_required_fields( + request_type=document_processor_service.FetchProcessorTypesRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.FetchProcessorTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_processor_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_processor_types_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_processor_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_processor_types_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_fetch_processor_types" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_fetch_processor_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.FetchProcessorTypesRequest.pb( + document_processor_service.FetchProcessorTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.FetchProcessorTypesResponse.to_json( + document_processor_service.FetchProcessorTypesResponse() + ) + ) + + request = document_processor_service.FetchProcessorTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.FetchProcessorTypesResponse() + + client.fetch_processor_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_processor_types_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.FetchProcessorTypesRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_processor_types(request) + + +def test_fetch_processor_types_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.FetchProcessorTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_processor_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}:fetchProcessorTypes" + % client.transport._host, + args[1], + ) + + +def test_fetch_processor_types_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_processor_types( + document_processor_service.FetchProcessorTypesRequest(), + parent="parent_value", + ) + + +def test_fetch_processor_types_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorTypesRequest, + dict, + ], +) +def test_list_processor_types_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.ListProcessorTypesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processor_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorTypesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processor_types_rest_required_fields( + request_type=document_processor_service.ListProcessorTypesRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processor_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processor_types_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processor_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processor_types_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_types" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorTypesRequest.pb( + document_processor_service.ListProcessorTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorTypesResponse.to_json( + document_processor_service.ListProcessorTypesResponse() + ) + ) + + request = document_processor_service.ListProcessorTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorTypesResponse() + + client.list_processor_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processor_types_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorTypesRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processor_types(request) + + +def test_list_processor_types_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processor_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/processorTypes" + % client.transport._host, + args[1], + ) + + +def test_list_processor_types_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processor_types( + document_processor_service.ListProcessorTypesRequest(), + parent="parent_value", + ) + + +def test_list_processor_types_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + processor_type.ProcessorType(), + processor_type.ProcessorType(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[], + next_page_token="def", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + processor_type.ProcessorType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorTypesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_processor_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor_type.ProcessorType) for i in results) + + pages = list(client.list_processor_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorTypeRequest, + dict, + ], +) +def test_get_processor_type_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processorTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType( + name="name_value", + type_="type__value", + category="category_value", + allow_creation=True, + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + sample_document_uris=["sample_document_uris_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor_type(request) + + # Establish that the response is the type that we expect. 
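
The pager test above stitches four canned pages together; in normal use the pager returned by list_processor_types hides the page_token plumbing entirely. A short usage sketch, assuming application default credentials (the parent value is a placeholder):

    from google.cloud import documentai_v1

    client = documentai_v1.DocumentProcessorServiceClient(transport="rest")
    pager = client.list_processor_types(parent="projects/my-project/locations/us")
    for processor_type in pager:  # iterating fetches later pages on demand
        print(processor_type.type_)
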
+ assert isinstance(response, processor_type.ProcessorType) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.category == "category_value" + assert response.allow_creation is True + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.sample_document_uris == ["sample_document_uris_value"] + + +def test_get_processor_type_rest_required_fields( + request_type=document_processor_service.GetProcessorTypeRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_type_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_type_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_type" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorTypeRequest.pb( + document_processor_service.GetProcessorTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor_type.ProcessorType.to_json( + processor_type.ProcessorType() + ) + + request = document_processor_service.GetProcessorTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor_type.ProcessorType() + + client.get_processor_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_type_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.GetProcessorTypeRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processorTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor_type(request) + + +def test_get_processor_type_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processorTypes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processorTypes/*}" + % client.transport._host, + args[1], + ) + + +def test_get_processor_type_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor_type( + document_processor_service.GetProcessorTypeRequest(), + name="name_value", + ) + + +def test_get_processor_type_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorsRequest, + dict, + ], +) +def test_list_processors_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
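+        # Only next_page_token is set on the fake response; it is serialized
+        # to JSON below, returned through the mocked session, and the test
+        # then checks that the client wraps it in a ListProcessorsPager.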
+ return_value = document_processor_service.ListProcessorsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processors_rest_required_fields( + request_type=document_processor_service.ListProcessorsRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processors_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processors._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processors_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_list_processors" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_list_processors" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorsRequest.pb( + document_processor_service.ListProcessorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorsResponse.to_json( + document_processor_service.ListProcessorsResponse() + ) + ) + + request = document_processor_service.ListProcessorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorsResponse() + + client.list_processors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processors_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorsRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processors(request) + + +def test_list_processors_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/processors" % client.transport._host, + args[1], + ) + + +def test_list_processors_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processors( + document_processor_service.ListProcessorsRequest(), + parent="parent_value", + ) + + +def test_list_processors_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + processor.Processor(), + processor.Processor(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorsResponse( + processors=[], + next_page_token="def", + ), + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + processor.Processor(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_processors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor.Processor) for i in results) + + pages = list(client.list_processors(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorRequest, + dict, + ], +) +def test_get_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.Processor( + name="name_value", + type_="type__value", + display_name="display_name_value", + state=processor.Processor.State.ENABLED, + default_processor_version="default_processor_version_value", + process_endpoint="process_endpoint_value", + kms_key_name="kms_key_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor(request) + + # Establish that the response is the type that we expect. 
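+    # Each assertion below mirrors a field set on return_value above,
+    # confirming that string and enum fields survive the proto -> JSON ->
+    # proto round trip through the mocked REST response.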
+ assert isinstance(response, processor.Processor) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.state == processor.Processor.State.ENABLED + assert response.default_processor_version == "default_processor_version_value" + assert response.process_endpoint == "process_endpoint_value" + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_processor_rest_required_fields( + request_type=document_processor_service.GetProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor.Processor() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorRequest.pb( + document_processor_service.GetProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor.Processor.to_json(processor.Processor()) + + request = document_processor_service.GetProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor.Processor() + + client.get_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.GetProcessorRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor(request) + + +def test_get_processor_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.Processor() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*}" % client.transport._host, + args[1], + ) + + +def test_get_processor_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor( + document_processor_service.GetProcessorRequest(), + name="name_value", + ) + + +def test_get_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorVersionRequest, + dict, + ], +) +def test_get_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
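+        # The fake ProcessorVersion mixes field kinds (strings, the DEPLOYED
+        # state enum, and the google_managed bool) so the assertions below
+        # can check that each kind survives the JSON round trip.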
+ return_value = processor.ProcessorVersion( + name="name_value", + display_name="display_name_value", + state=processor.ProcessorVersion.State.DEPLOYED, + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + google_managed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor_version(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, processor.ProcessorVersion) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == processor.ProcessorVersion.State.DEPLOYED + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.google_managed is True + + +def test_get_processor_version_rest_required_fields( + request_type=document_processor_service.GetProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor.ProcessorVersion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_version" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorVersionRequest.pb( + document_processor_service.GetProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor.ProcessorVersion.to_json( + processor.ProcessorVersion() + ) + + request = document_processor_service.GetProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor.ProcessorVersion() + + client.get_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.GetProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor_version(request) + + +def test_get_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.ProcessorVersion() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor_version( + document_processor_service.GetProcessorVersionRequest(), + name="name_value", + ) + + +def test_get_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorVersionsRequest, + dict, + ], +) +def test_list_processor_versions_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.ListProcessorVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processor_versions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processor_versions_rest_required_fields( + request_type=document_processor_service.ListProcessorVersionsRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processor_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processor_versions_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processor_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processor_versions_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_versions", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_list_processor_versions", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorVersionsRequest.pb( + document_processor_service.ListProcessorVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorVersionsResponse.to_json( + document_processor_service.ListProcessorVersionsResponse() + ) + ) + + request = document_processor_service.ListProcessorVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorVersionsResponse() + + client.list_processor_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processor_versions_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorVersionsRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processor_versions(request) + + +def test_list_processor_versions_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processor_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/processors/*}/processorVersions" + % client.transport._host, + args[1], + ) + + +def test_list_processor_versions_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processor_versions( + document_processor_service.ListProcessorVersionsRequest(), + parent="parent_value", + ) + + +def test_list_processor_versions_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + processor.ProcessorVersion(), + processor.ProcessorVersion(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[], + next_page_token="def", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + processor.ProcessorVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorVersionsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3" + } + + pager = client.list_processor_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor.ProcessorVersion) for i in results) + + pages = list(client.list_processor_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DeleteProcessorVersionRequest, + dict, + ], +) +def test_delete_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_processor_version(request) + + # Establish that the response is the type that we expect. 
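+    # DeleteProcessorVersion is a long-running operation, so the client
+    # returns an operation future rather than a resource message; the test
+    # only checks that the underlying Operation name round-tripped intact.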
+ assert response.operation.name == "operations/spam" + + +def test_delete_processor_version_rest_required_fields( + request_type=document_processor_service.DeleteProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
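+            # Same stand-in pattern as the GET methods above, but transcoded
+            # to the DELETE verb with no request body; as before, only the
+            # "$alt" query parameter is expected to reach the mocked session.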
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_delete_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeleteProcessorVersionRequest.pb( + document_processor_service.DeleteProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeleteProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeleteProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_processor_version(request) + + +def test_delete_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_processor_version( + document_processor_service.DeleteProcessorVersionRequest(), + name="name_value", + ) + + +def test_delete_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DeployProcessorVersionRequest, + dict, + ], +) +def test_deploy_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.deploy_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_deploy_processor_version_rest_required_fields( + request_type=document_processor_service.DeployProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
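+            # Unlike the GET/DELETE stand-ins above, DeployProcessorVersion
+            # transcodes to a POST, so the hand-built result also carries the
+            # request message as the HTTP body.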
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.deploy_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_deploy_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.deploy_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deploy_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_deploy_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_deploy_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeployProcessorVersionRequest.pb( + document_processor_service.DeployProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeployProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.deploy_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deploy_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeployProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deploy_processor_version(request) + + +def test_deploy_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.deploy_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:deploy" + % client.transport._host, + args[1], + ) + + +def test_deploy_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.deploy_processor_version( + document_processor_service.DeployProcessorVersionRequest(), + name="name_value", + ) + + +def test_deploy_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.UndeployProcessorVersionRequest, + dict, + ], +) +def test_undeploy_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.undeploy_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_undeploy_processor_version_rest_required_fields( + request_type=document_processor_service.UndeployProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undeploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undeploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.undeploy_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_undeploy_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.undeploy_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_undeploy_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_undeploy_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_undeploy_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.UndeployProcessorVersionRequest.pb( + document_processor_service.UndeployProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.UndeployProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.undeploy_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_undeploy_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.UndeployProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.undeploy_processor_version(request) + + +def test_undeploy_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.undeploy_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*/processorVersions/*}:undeploy" + % client.transport._host, + args[1], + ) + + +def test_undeploy_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.undeploy_processor_version( + document_processor_service.UndeployProcessorVersionRequest(), + name="name_value", + ) + + +def test_undeploy_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.CreateProcessorRequest, + dict, + ], +) +def test_create_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["processor"] = { + "name": "name_value", + "type_": "type__value", + "display_name": "display_name_value", + "state": 1, + "default_processor_version": "default_processor_version_value", + "process_endpoint": "process_endpoint_value", + "create_time": {"seconds": 751, "nanos": 543}, + "kms_key_name": "kms_key_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcd_processor.Processor( + name="name_value", + type_="type__value", + display_name="display_name_value", + state=gcd_processor.Processor.State.ENABLED, + default_processor_version="default_processor_version_value", + process_endpoint="process_endpoint_value", + kms_key_name="kms_key_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcd_processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_processor(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_processor.Processor) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.state == gcd_processor.Processor.State.ENABLED + assert response.default_processor_version == "default_processor_version_value" + assert response.process_endpoint == "process_endpoint_value" + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_processor_rest_required_fields( + request_type=document_processor_service.CreateProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_processor.Processor() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcd_processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_processor._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "processor", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_create_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_create_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.CreateProcessorRequest.pb( + document_processor_service.CreateProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcd_processor.Processor.to_json( + gcd_processor.Processor() + ) + + request = document_processor_service.CreateProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_processor.Processor() + + client.create_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.CreateProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["processor"] = { + "name": "name_value", + "type_": "type__value", + "display_name": "display_name_value", + "state": 1, + "default_processor_version": "default_processor_version_value", + "process_endpoint": "process_endpoint_value", + "create_time": {"seconds": 751, "nanos": 543}, + "kms_key_name": "kms_key_name_value", + } + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_processor(request) + + +def test_create_processor_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_processor.Processor() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + processor=gcd_processor.Processor(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcd_processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_processor(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/processors" % client.transport._host, + args[1], + ) + + +def test_create_processor_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_processor( + document_processor_service.CreateProcessorRequest(), + parent="parent_value", + processor=gcd_processor.Processor(name="name_value"), + ) + + +def test_create_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DeleteProcessorRequest, + dict, + ], +) +def test_delete_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_processor(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_processor_rest_required_fields( + request_type=document_processor_service.DeleteProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeleteProcessorRequest.pb( + document_processor_service.DeleteProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeleteProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeleteProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_processor(request) + + +def test_delete_processor_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_processor(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/processors/*}" % client.transport._host, + args[1], + ) + + +def test_delete_processor_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_processor( + document_processor_service.DeleteProcessorRequest(), + name="name_value", + ) + + +def test_delete_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.EnableProcessorRequest, + dict, + ], +) +def test_enable_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_processor(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_enable_processor_rest_required_fields( + request_type=document_processor_service.EnableProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_enable_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_enable_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.EnableProcessorRequest.pb( + document_processor_service.EnableProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.EnableProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.EnableProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_processor(request) + + +def test_enable_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DisableProcessorRequest, + dict, + ], +) +def test_disable_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_processor(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_disable_processor_rest_required_fields( + request_type=document_processor_service.DisableProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_disable_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_disable_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DisableProcessorRequest.pb( + document_processor_service.DisableProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DisableProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DisableProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_processor(request) + + +def test_disable_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.SetDefaultProcessorVersionRequest, + dict, + ], +) +def test_set_default_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "processor": "projects/sample1/locations/sample2/processors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_default_processor_version(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_set_default_processor_version_rest_required_fields( + request_type=document_processor_service.SetDefaultProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["processor"] = "" + request_init["default_processor_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_default_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["processor"] = "processor_value" + jsonified_request["defaultProcessorVersion"] = "default_processor_version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_default_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "processor" in jsonified_request + assert jsonified_request["processor"] == "processor_value" + assert "defaultProcessorVersion" in jsonified_request + assert ( + jsonified_request["defaultProcessorVersion"] + == "default_processor_version_value" + ) + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_default_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_default_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_default_processor_version._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "processor", + "defaultProcessorVersion", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_default_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_set_default_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_set_default_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.SetDefaultProcessorVersionRequest.pb( + document_processor_service.SetDefaultProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.SetDefaultProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.set_default_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_default_processor_version_rest_bad_request( + transport: str = "rest", + 
request_type=document_processor_service.SetDefaultProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "processor": "projects/sample1/locations/sample2/processors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_default_processor_version(request) + + +def test_set_default_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ReviewDocumentRequest, + dict, + ], +) +def test_review_document_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.review_document(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_review_document_rest_required_fields( + request_type=document_processor_service.ReviewDocumentRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["human_review_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).review_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["humanReviewConfig"] = "human_review_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).review_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "humanReviewConfig" in jsonified_request + assert jsonified_request["humanReviewConfig"] == "human_review_config_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.review_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_review_document_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.review_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("humanReviewConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_review_document_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_review_document" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_review_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ReviewDocumentRequest.pb( + document_processor_service.ReviewDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.ReviewDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.review_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_review_document_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ReviewDocumentRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.review_document(request) + + +def test_review_document_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + + # get truthy value for each flattened field + mock_args = dict( + human_review_config="human_review_config_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.review_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{human_review_config=projects/*/locations/*/processors/*/humanReviewConfig}:reviewDocument" + % client.transport._host, + args[1], + ) + + +def test_review_document_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.review_document( + document_processor_service.ReviewDocumentRequest(), + human_review_config="human_review_config_value", + ) + + +def test_review_document_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DocumentProcessorServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DocumentProcessorServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentProcessorServiceGrpcTransport, + transports.DocumentProcessorServiceGrpcAsyncIOTransport, + transports.DocumentProcessorServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DocumentProcessorServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DocumentProcessorServiceGrpcTransport, + ) + + +def test_document_processor_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DocumentProcessorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_document_processor_service_base_transport(): + # Instantiate the base transport. 
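+    # Patching the transport's __init__ lets the abstract base class be
+    # instantiated directly so each RPC stub can be checked below for
+    # NotImplementedError.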
+ with mock.patch( + "google.cloud.documentai_v1.services.document_processor_service.transports.DocumentProcessorServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DocumentProcessorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "process_document", + "batch_process_documents", "fetch_processor_types", "list_processor_types", "get_processor_type", @@ -5697,6 +10776,7 @@ def test_document_processor_service_transport_auth_adc(transport_class): [ transports.DocumentProcessorServiceGrpcTransport, transports.DocumentProcessorServiceGrpcAsyncIOTransport, + transports.DocumentProcessorServiceRestTransport, ], ) def test_document_processor_service_transport_auth_gdch_credentials(transport_class): @@ -5798,11 +10878,40 @@ def test_document_processor_service_grpc_transport_client_cert_source_for_mtls( ) +def test_document_processor_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DocumentProcessorServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_document_processor_service_rest_lro_client(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
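+    # The operations_client property is cached on the transport, so repeated
+    # access is expected to yield the identical object.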
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_processor_service_host_no_port(transport_name): @@ -5813,7 +10922,11 @@ def test_document_processor_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) @pytest.mark.parametrize( @@ -5821,6 +10934,7 @@ def test_document_processor_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_processor_service_host_with_port(transport_name): @@ -5831,7 +10945,84 @@ def test_document_processor_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("documentai.googleapis.com:8000") + assert client.transport._host == ( + "documentai.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_document_processor_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DocumentProcessorServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DocumentProcessorServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.process_document._session + session2 = client2.transport.process_document._session + assert session1 != session2 + session1 = client1.transport.batch_process_documents._session + session2 = client2.transport.batch_process_documents._session + assert session1 != session2 + session1 = client1.transport.fetch_processor_types._session + session2 = client2.transport.fetch_processor_types._session + assert session1 != session2 + session1 = client1.transport.list_processor_types._session + session2 = client2.transport.list_processor_types._session + assert session1 != session2 + session1 = client1.transport.get_processor_type._session + session2 = client2.transport.get_processor_type._session + assert session1 != session2 + session1 = client1.transport.list_processors._session + session2 = client2.transport.list_processors._session + assert session1 != session2 + session1 = client1.transport.get_processor._session + session2 = client2.transport.get_processor._session + assert session1 != session2 + session1 = client1.transport.get_processor_version._session + session2 = client2.transport.get_processor_version._session + assert session1 != session2 + session1 = client1.transport.list_processor_versions._session + session2 = client2.transport.list_processor_versions._session + assert session1 != session2 + session1 = client1.transport.delete_processor_version._session + session2 = client2.transport.delete_processor_version._session + assert session1 != session2 + session1 = client1.transport.deploy_processor_version._session + session2 = client2.transport.deploy_processor_version._session + assert session1 != session2 + session1 = client1.transport.undeploy_processor_version._session + session2 = client2.transport.undeploy_processor_version._session + assert session1 != session2 + session1 = client1.transport.create_processor._session + session2 = 
client2.transport.create_processor._session + assert session1 != session2 + session1 = client1.transport.delete_processor._session + session2 = client2.transport.delete_processor._session + assert session1 != session2 + session1 = client1.transport.enable_processor._session + session2 = client2.transport.enable_processor._session + assert session1 != session2 + session1 = client1.transport.disable_processor._session + session2 = client2.transport.disable_processor._session + assert session1 != session2 + session1 = client1.transport.set_default_processor_version._session + session2 = client2.transport.set_default_processor_version._session + assert session1 != session2 + session1 = client1.transport.review_document._session + session2 = client2.transport.review_document._session + assert session1 != session2 def test_document_processor_service_grpc_transport_channel(): @@ -6247,6 +11438,294 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
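+    # CancelOperation has an Empty response, which the client surfaces as None.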
+ assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_cancel_operation(transport: str = "grpc"): client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6968,6 +12447,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -6985,6 +12465,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py b/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py index be4950f3..6e61b4e7 100644 --- a/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py +++ b/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api import launch_stage_pb2 # type: ignore @@ -47,6 +49,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -60,6 +63,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.documentai_v1beta3.services.document_processor_service import ( DocumentProcessorServiceAsyncClient, @@ -131,6 +136,7 @@ def test__get_default_mtls_endpoint(): [ (DocumentProcessorServiceClient, "grpc"), (DocumentProcessorServiceAsyncClient, "grpc_asyncio"), + (DocumentProcessorServiceClient, "rest"), ], ) def test_document_processor_service_client_from_service_account_info( @@ -146,7 +152,11 @@ def test_document_processor_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) @pytest.mark.parametrize( @@ -154,6 +164,7 @@ def test_document_processor_service_client_from_service_account_info( [ (transports.DocumentProcessorServiceGrpcTransport, "grpc"), (transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DocumentProcessorServiceRestTransport, "rest"), ], ) def test_document_processor_service_client_service_account_always_use_jwt( @@ -179,6 +190,7 @@ def test_document_processor_service_client_service_account_always_use_jwt( [ (DocumentProcessorServiceClient, "grpc"), (DocumentProcessorServiceAsyncClient, "grpc_asyncio"), + (DocumentProcessorServiceClient, "rest"), ], 
) def test_document_processor_service_client_from_service_account_file( @@ -201,13 +213,18 @@ def test_document_processor_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) def test_document_processor_service_client_get_transport_class(): transport = DocumentProcessorServiceClient.get_transport_class() available_transports = [ transports.DocumentProcessorServiceGrpcTransport, + transports.DocumentProcessorServiceRestTransport, ] assert transport in available_transports @@ -228,6 +245,11 @@ def test_document_processor_service_client_get_transport_class(): transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -387,6 +409,18 @@ def test_document_processor_service_client_client_options( "grpc_asyncio", "false", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + "true", + ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -593,6 +627,11 @@ def test_document_processor_service_client_get_mtls_endpoint_and_cert_source( transports.DocumentProcessorServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + ), ], ) def test_document_processor_service_client_client_options_scopes( @@ -633,6 +672,12 @@ def test_document_processor_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + DocumentProcessorServiceClient, + transports.DocumentProcessorServiceRestTransport, + "rest", + None, + ), ], ) def test_document_processor_service_client_client_options_credentials_file( @@ -6607,148 +6652,6378 @@ async def test_list_evaluations_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ProcessRequest, + dict, + ], +) +def test_process_document_rest(request_type): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse( + human_review_operation="human_review_operation_value", ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DocumentProcessorServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ProcessResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.process_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document_processor_service.ProcessResponse) + assert response.human_review_operation == "human_review_operation_value" + + +def test_process_document_rest_required_fields( + request_type=document_processor_service.ProcessRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).process_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).process_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ProcessResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.process_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_process_document_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.process_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_process_document_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options=options, - transport=transport, + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_process_document" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_process_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ProcessRequest.pb( + document_processor_service.ProcessRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_processor_service.ProcessResponse.to_json( + document_processor_service.ProcessResponse() ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + request = document_processor_service.ProcessRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ProcessResponse() + + client.process_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide scopes and a transport instance. 
- transport = transports.DocumentProcessorServiceGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_process_document_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.ProcessRequest +): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DocumentProcessorServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.process_document(request) + + +def test_process_document_rest_flattened(): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DocumentProcessorServiceClient(transport=transport) - assert client.transport is transport + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ProcessResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DocumentProcessorServiceGrpcTransport( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ProcessResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.process_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*}:process" + % client.transport._host, + args[1], + ) + + +def test_process_document_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.DocumentProcessorServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.process_document( + document_processor_service.ProcessRequest(), + name="name_value", + ) + + +def test_process_document_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - channel = transport.grpc_channel - assert channel @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DocumentProcessorServiceGrpcTransport, - transports.DocumentProcessorServiceGrpcAsyncIOTransport, + document_processor_service.BatchProcessRequest, + dict, ], ) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_batch_process_documents_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = DocumentProcessorServiceClient.get_transport_class(transport_name)( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_process_documents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_batch_process_documents_rest_required_fields( + request_type=document_processor_service.BatchProcessRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_process_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_process_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
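+    # batch_process_documents is a long-running method, so the REST stub is
+    # expected to return a raw operations_pb2.Operation payload here.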
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_process_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_process_documents_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_process_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_process_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), ) - assert transport.kind == transport_name + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_batch_process_documents", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_batch_process_documents", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.BatchProcessRequest.pb( + document_processor_service.BatchProcessRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + request = document_processor_service.BatchProcessRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
+ client.batch_process_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_process_documents_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.BatchProcessRequest +): client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert isinstance( - client.transport, - transports.DocumentProcessorServiceGrpcTransport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_process_documents(request) + + +def test_batch_process_documents_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -def test_document_processor_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DocumentProcessorServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -def test_document_processor_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.documentai_v1beta3.services.document_processor_service.transports.DocumentProcessorServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DocumentProcessorServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.batch_process_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*}:batchProcess" + % client.transport._host, + args[1], ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "process_document", - "batch_process_documents", - "fetch_processor_types", - "list_processor_types", - "get_processor_type", + +def test_batch_process_documents_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_process_documents( + document_processor_service.BatchProcessRequest(), + name="name_value", + ) + + +def test_batch_process_documents_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.FetchProcessorTypesRequest, + dict, + ], +) +def test_fetch_processor_types_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.FetchProcessorTypesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_processor_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document_processor_service.FetchProcessorTypesResponse) + + +def test_fetch_processor_types_rest_required_fields( + request_type=document_processor_service.FetchProcessorTypesRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.FetchProcessorTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_processor_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_processor_types_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_processor_types._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_processor_types_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_fetch_processor_types" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_fetch_processor_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.FetchProcessorTypesRequest.pb( + document_processor_service.FetchProcessorTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.FetchProcessorTypesResponse.to_json( + document_processor_service.FetchProcessorTypesResponse() + ) + ) + + request = document_processor_service.FetchProcessorTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.FetchProcessorTypesResponse() + + client.fetch_processor_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_processor_types_rest_bad_request( + transport: str = "rest", + 
request_type=document_processor_service.FetchProcessorTypesRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_processor_types(request) + + +def test_fetch_processor_types_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.FetchProcessorTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.FetchProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_processor_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*}:fetchProcessorTypes" + % client.transport._host, + args[1], + ) + + +def test_fetch_processor_types_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_processor_types( + document_processor_service.FetchProcessorTypesRequest(), + parent="parent_value", + ) + + +def test_fetch_processor_types_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorTypesRequest, + dict, + ], +) +def test_list_processor_types_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
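+        # Returning next_page_token exercises the ListProcessorTypesPager that
+        # the client wraps around the raw REST response.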
+ return_value = document_processor_service.ListProcessorTypesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processor_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorTypesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processor_types_rest_required_fields( + request_type=document_processor_service.ListProcessorTypesRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processor_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processor_types_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processor_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processor_types_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_types" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorTypesRequest.pb( + document_processor_service.ListProcessorTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorTypesResponse.to_json( + document_processor_service.ListProcessorTypesResponse() + ) + ) + + request = document_processor_service.ListProcessorTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorTypesResponse() + + client.list_processor_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processor_types_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorTypesRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processor_types(request) + + +def test_list_processor_types_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorTypesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processor_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*}/processorTypes" + % client.transport._host, + args[1], + ) + + +def test_list_processor_types_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processor_types( + document_processor_service.ListProcessorTypesRequest(), + parent="parent_value", + ) + + +def test_list_processor_types_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + processor_type.ProcessorType(), + processor_type.ProcessorType(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[], + next_page_token="def", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorTypesResponse( + processor_types=[ + processor_type.ProcessorType(), + processor_type.ProcessorType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorTypesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_processor_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor_type.ProcessorType) for i in results) + + pages = list(client.list_processor_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorTypeRequest, + dict, + ], +) +def test_get_processor_type_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processorTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType( + name="name_value", + type_="type__value", + category="category_value", + allow_creation=True, + launch_stage=launch_stage_pb2.LaunchStage.UNIMPLEMENTED, + sample_document_uris=["sample_document_uris_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, processor_type.ProcessorType) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.category == "category_value" + assert response.allow_creation is True + assert response.launch_stage == launch_stage_pb2.LaunchStage.UNIMPLEMENTED + assert response.sample_document_uris == ["sample_document_uris_value"] + + +def test_get_processor_type_rest_required_fields( + request_type=document_processor_service.GetProcessorTypeRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_type_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_type_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_type" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorTypeRequest.pb( + document_processor_service.GetProcessorTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor_type.ProcessorType.to_json( + processor_type.ProcessorType() + ) + + request = document_processor_service.GetProcessorTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor_type.ProcessorType() + + client.get_processor_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_type_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.GetProcessorTypeRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processorTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor_type(request) + + +def test_get_processor_type_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor_type.ProcessorType() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processorTypes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor_type.ProcessorType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processorTypes/*}" + % client.transport._host, + args[1], + ) + + +def test_get_processor_type_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor_type( + document_processor_service.GetProcessorTypeRequest(), + name="name_value", + ) + + +def test_get_processor_type_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorsRequest, + dict, + ], +) +def test_list_processors_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.ListProcessorsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processors_rest_required_fields( + request_type=document_processor_service.ListProcessorsRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processors_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processors._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processors_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_list_processors" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_list_processors" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorsRequest.pb( + document_processor_service.ListProcessorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorsResponse.to_json( + document_processor_service.ListProcessorsResponse() + ) + ) + + request = document_processor_service.ListProcessorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorsResponse() + + client.list_processors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processors_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorsRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processors(request) + + +def test_list_processors_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*}/processors" + % client.transport._host, + args[1], + ) + + +def test_list_processors_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processors( + document_processor_service.ListProcessorsRequest(), + parent="parent_value", + ) + + +def test_list_processors_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + processor.Processor(), + processor.Processor(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorsResponse( + processors=[], + next_page_token="def", + ), + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorsResponse( + processors=[ + processor.Processor(), + processor.Processor(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_processors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor.Processor) for i in results) + + pages = list(client.list_processors(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorRequest, + dict, + ], +) +def test_get_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.Processor( + name="name_value", + type_="type__value", + display_name="display_name_value", + state=processor.Processor.State.ENABLED, + default_processor_version="default_processor_version_value", + process_endpoint="process_endpoint_value", + kms_key_name="kms_key_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, processor.Processor) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.state == processor.Processor.State.ENABLED + assert response.default_processor_version == "default_processor_version_value" + assert response.process_endpoint == "process_endpoint_value" + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_processor_rest_required_fields( + request_type=document_processor_service.GetProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor.Processor() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorRequest.pb( + document_processor_service.GetProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor.Processor.to_json(processor.Processor()) + + request = document_processor_service.GetProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor.Processor() + + client.get_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_rest_bad_request( + transport: str = "rest", request_type=document_processor_service.GetProcessorRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor(request) + + +def test_get_processor_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.Processor() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*}" + % client.transport._host, + args[1], + ) + + +def test_get_processor_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor( + document_processor_service.GetProcessorRequest(), + name="name_value", + ) + + +def test_get_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.TrainProcessorVersionRequest, + dict, + ], +) +def test_train_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_train_processor_version_rest_required_fields( + request_type=document_processor_service.TrainProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "processorVersion", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_train_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_train_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.TrainProcessorVersionRequest.pb( + document_processor_service.TrainProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.TrainProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.TrainProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_processor_version(request) + + +def test_train_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + processor_version=processor.ProcessorVersion(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.train_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*/processors/*}/processorVersions:train" + % client.transport._host, + args[1], + ) + + +def test_train_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.train_processor_version( + document_processor_service.TrainProcessorVersionRequest(), + parent="parent_value", + processor_version=processor.ProcessorVersion(name="name_value"), + ) + + +def test_train_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetProcessorVersionRequest, + dict, + ], +) +def test_get_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = processor.ProcessorVersion( + name="name_value", + display_name="display_name_value", + state=processor.ProcessorVersion.State.DEPLOYED, + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + google_managed=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_processor_version(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, processor.ProcessorVersion) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == processor.ProcessorVersion.State.DEPLOYED + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.google_managed is True + + +def test_get_processor_version_rest_required_fields( + request_type=document_processor_service.GetProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = processor.ProcessorVersion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_version" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetProcessorVersionRequest.pb( + document_processor_service.GetProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = processor.ProcessorVersion.to_json( + processor.ProcessorVersion() + ) + + request = document_processor_service.GetProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = processor.ProcessorVersion() + + client.get_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.GetProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_processor_version(request) + + +def test_get_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = processor.ProcessorVersion() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = processor.ProcessorVersion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_processor_version( + document_processor_service.GetProcessorVersionRequest(), + name="name_value", + ) + + +def test_get_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListProcessorVersionsRequest, + dict, + ], +) +def test_list_processor_versions_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.ListProcessorVersionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_processor_versions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProcessorVersionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_processor_versions_rest_required_fields( + request_type=document_processor_service.ListProcessorVersionsRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_processor_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_processor_versions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_processor_versions_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_processor_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_processor_versions_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_versions", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_list_processor_versions", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListProcessorVersionsRequest.pb( + document_processor_service.ListProcessorVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListProcessorVersionsResponse.to_json( + document_processor_service.ListProcessorVersionsResponse() + ) + ) + + request = document_processor_service.ListProcessorVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListProcessorVersionsResponse() + + client.list_processor_versions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_processor_versions_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListProcessorVersionsRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_processor_versions(request) + + +def test_list_processor_versions_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListProcessorVersionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListProcessorVersionsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_processor_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*/processors/*}/processorVersions" + % client.transport._host, + args[1], + ) + + +def test_list_processor_versions_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_processor_versions( + document_processor_service.ListProcessorVersionsRequest(), + parent="parent_value", + ) + + +def test_list_processor_versions_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + processor.ProcessorVersion(), + processor.ProcessorVersion(), + ], + next_page_token="abc", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[], + next_page_token="def", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + ], + next_page_token="ghi", + ), + document_processor_service.ListProcessorVersionsResponse( + processor_versions=[ + processor.ProcessorVersion(), + processor.ProcessorVersion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListProcessorVersionsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3" + } + + pager = client.list_processor_versions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, processor.ProcessorVersion) for i in results) + + pages = list(client.list_processor_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DeleteProcessorVersionRequest, + dict, + ], +) +def test_delete_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_processor_version_rest_required_fields( + request_type=document_processor_service.DeleteProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_delete_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeleteProcessorVersionRequest.pb( + document_processor_service.DeleteProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeleteProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeleteProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_processor_version(request) + + +def test_delete_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_processor_version( + document_processor_service.DeleteProcessorVersionRequest(), + name="name_value", + ) + + +def test_delete_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DeployProcessorVersionRequest, + dict, + ], +) +def test_deploy_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.deploy_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_deploy_processor_version_rest_required_fields( + request_type=document_processor_service.DeployProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.deploy_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_deploy_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.deploy_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deploy_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_deploy_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_deploy_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeployProcessorVersionRequest.pb( + document_processor_service.DeployProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeployProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.deploy_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deploy_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeployProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deploy_processor_version(request) + + +def test_deploy_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.deploy_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:deploy" + % client.transport._host, + args[1], + ) + + +def test_deploy_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.deploy_processor_version( + document_processor_service.DeployProcessorVersionRequest(), + name="name_value", + ) + + +def test_deploy_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.UndeployProcessorVersionRequest, + dict, + ], +) +def test_undeploy_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.undeploy_processor_version(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_undeploy_processor_version_rest_required_fields( + request_type=document_processor_service.UndeployProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undeploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undeploy_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.undeploy_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_undeploy_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.undeploy_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_undeploy_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_undeploy_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_undeploy_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.UndeployProcessorVersionRequest.pb( + document_processor_service.UndeployProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.UndeployProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.undeploy_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_undeploy_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.UndeployProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.undeploy_processor_version(request) + + +def test_undeploy_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.undeploy_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*}:undeploy" + % client.transport._host, + args[1], + ) + + +def test_undeploy_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.undeploy_processor_version( + document_processor_service.UndeployProcessorVersionRequest(), + name="name_value", + ) + + +def test_undeploy_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.CreateProcessorRequest, + dict, + ], +) +def test_create_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["processor"] = { + "name": "name_value", + "type_": "type__value", + "display_name": "display_name_value", + "state": 1, + "default_processor_version": "default_processor_version_value", + "process_endpoint": "process_endpoint_value", + "create_time": {"seconds": 751, "nanos": 543}, + "kms_key_name": "kms_key_name_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcd_processor.Processor( + name="name_value", + type_="type__value", + display_name="display_name_value", + state=gcd_processor.Processor.State.ENABLED, + default_processor_version="default_processor_version_value", + process_endpoint="process_endpoint_value", + kms_key_name="kms_key_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcd_processor.Processor.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_processor(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_processor.Processor) + assert response.name == "name_value" + assert response.type_ == "type__value" + assert response.display_name == "display_name_value" + assert response.state == gcd_processor.Processor.State.ENABLED + assert response.default_processor_version == "default_processor_version_value" + assert response.process_endpoint == "process_endpoint_value" + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_processor_rest_required_fields( + request_type=document_processor_service.CreateProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_processor.Processor() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcd_processor.Processor.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_processor(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_processor_rest_unset_required_fields():
+    transport = transports.DocumentProcessorServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_processor._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "processor",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_processor_rest_interceptors(null_interceptor):
+    transport = transports.DocumentProcessorServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DocumentProcessorServiceRestInterceptor(),
+    )
+    client = DocumentProcessorServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DocumentProcessorServiceRestInterceptor, "post_create_processor"
+    ) as post, mock.patch.object(
+        transports.DocumentProcessorServiceRestInterceptor, "pre_create_processor"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = document_processor_service.CreateProcessorRequest.pb(
+            document_processor_service.CreateProcessorRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcd_processor.Processor.to_json(
+            gcd_processor.Processor()
+        )
+
+        request = document_processor_service.CreateProcessorRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcd_processor.Processor()
+
+        client.create_processor(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_processor_rest_bad_request(
+    transport: str = "rest",
+    request_type=document_processor_service.CreateProcessorRequest,
+):
+    client = DocumentProcessorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["processor"] = {
+        "name": "name_value",
+        "type_": "type__value",
+        "display_name": "display_name_value",
+        "state": 1,
+        "default_processor_version": "default_processor_version_value",
+        "process_endpoint": "process_endpoint_value",
+        "create_time": {"seconds": 751, "nanos": 543},
+        "kms_key_name": "kms_key_name_value",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_processor(request)
+
+
+def test_create_processor_rest_flattened():
+    client = DocumentProcessorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcd_processor.Processor()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            processor=gcd_processor.Processor(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gcd_processor.Processor.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_processor(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta3/{parent=projects/*/locations/*}/processors"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_processor_rest_flattened_error(transport: str = "rest"):
+    client = DocumentProcessorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_processor(
+            document_processor_service.CreateProcessorRequest(),
+            parent="parent_value",
+            processor=gcd_processor.Processor(name="name_value"),
+        )
+
+
+def test_create_processor_rest_error():
+    client = DocumentProcessorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        document_processor_service.DeleteProcessorRequest,
+        dict,
+    ],
+)
+def test_delete_processor_rest(request_type):
+    client = DocumentProcessorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_processor(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_processor_rest_required_fields( + request_type=document_processor_service.DeleteProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DeleteProcessorRequest.pb( + document_processor_service.DeleteProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DeleteProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DeleteProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_processor(request) + + +def test_delete_processor_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_processor(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_processor_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_processor( + document_processor_service.DeleteProcessorRequest(), + name="name_value", + ) + + +def test_delete_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.EnableProcessorRequest, + dict, + ], +) +def test_enable_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_processor(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_enable_processor_rest_required_fields( + request_type=document_processor_service.EnableProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.enable_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_enable_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_enable_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.EnableProcessorRequest.pb( + document_processor_service.EnableProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.EnableProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.EnableProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_processor(request) + + +def test_enable_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.DisableProcessorRequest, + dict, + ], +) +def test_disable_processor_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_processor(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_disable_processor_rest_required_fields( + request_type=document_processor_service.DisableProcessorRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_processor._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_processor(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_processor_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.disable_processor._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_processor_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_disable_processor" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_disable_processor" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.DisableProcessorRequest.pb( + document_processor_service.DisableProcessorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.DisableProcessorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_processor( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_processor_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.DisableProcessorRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/processors/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_processor(request) + + +def test_disable_processor_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.SetDefaultProcessorVersionRequest, + dict, + ], +) +def test_set_default_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "processor": "projects/sample1/locations/sample2/processors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_default_processor_version(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_set_default_processor_version_rest_required_fields( + request_type=document_processor_service.SetDefaultProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["processor"] = "" + request_init["default_processor_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_default_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["processor"] = "processor_value" + jsonified_request["defaultProcessorVersion"] = "default_processor_version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_default_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "processor" in jsonified_request + assert jsonified_request["processor"] == "processor_value" + assert "defaultProcessorVersion" in jsonified_request + assert ( + jsonified_request["defaultProcessorVersion"] + == "default_processor_version_value" + ) + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_default_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_default_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_default_processor_version._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "processor", + "defaultProcessorVersion", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_default_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_set_default_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_set_default_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.SetDefaultProcessorVersionRequest.pb( + document_processor_service.SetDefaultProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.SetDefaultProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.set_default_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_default_processor_version_rest_bad_request( + transport: str = "rest", + 
request_type=document_processor_service.SetDefaultProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "processor": "projects/sample1/locations/sample2/processors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_default_processor_version(request) + + +def test_set_default_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ReviewDocumentRequest, + dict, + ], +) +def test_review_document_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.review_document(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_review_document_rest_required_fields( + request_type=document_processor_service.ReviewDocumentRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["human_review_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).review_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["humanReviewConfig"] = "human_review_config_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).review_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "humanReviewConfig" in jsonified_request + assert jsonified_request["humanReviewConfig"] == "human_review_config_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.review_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_review_document_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.review_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("humanReviewConfig",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_review_document_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_review_document" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_review_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ReviewDocumentRequest.pb( + document_processor_service.ReviewDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.ReviewDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.review_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_review_document_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ReviewDocumentRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.review_document(request) + + +def test_review_document_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "human_review_config": "projects/sample1/locations/sample2/processors/sample3/humanReviewConfig" + } + + # get truthy value for each flattened field + mock_args = dict( + human_review_config="human_review_config_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.review_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{human_review_config=projects/*/locations/*/processors/*/humanReviewConfig}:reviewDocument" + % client.transport._host, + args[1], + ) + + +def test_review_document_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.review_document( + document_processor_service.ReviewDocumentRequest(), + human_review_config="human_review_config_value", + ) + + +def test_review_document_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.EvaluateProcessorVersionRequest, + dict, + ], +) +def test_evaluate_processor_version_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "processor_version": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.evaluate_processor_version(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_evaluate_processor_version_rest_required_fields( + request_type=document_processor_service.EvaluateProcessorVersionRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["processor_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).evaluate_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["processorVersion"] = "processor_version_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).evaluate_processor_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "processorVersion" in jsonified_request + assert jsonified_request["processorVersion"] == "processor_version_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.evaluate_processor_version(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_evaluate_processor_version_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.evaluate_processor_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("processorVersion",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_evaluate_processor_version_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_evaluate_processor_version", + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "pre_evaluate_processor_version", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.EvaluateProcessorVersionRequest.pb( + document_processor_service.EvaluateProcessorVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = document_processor_service.EvaluateProcessorVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.evaluate_processor_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_evaluate_processor_version_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.EvaluateProcessorVersionRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "processor_version": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.evaluate_processor_version(request) + + +def test_evaluate_processor_version_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "processor_version": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + processor_version="processor_version_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.evaluate_processor_version(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{processor_version=projects/*/locations/*/processors/*/processorVersions/*}:evaluateProcessorVersion" + % client.transport._host, + args[1], + ) + + +def test_evaluate_processor_version_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.evaluate_processor_version( + document_processor_service.EvaluateProcessorVersionRequest(), + processor_version="processor_version_value", + ) + + +def test_evaluate_processor_version_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.GetEvaluationRequest, + dict, + ], +) +def test_get_evaluation_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4/evaluations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = evaluation.Evaluation( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = evaluation.Evaluation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_evaluation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, evaluation.Evaluation) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + + +def test_get_evaluation_rest_required_fields( + request_type=document_processor_service.GetEvaluationRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_evaluation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_evaluation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = evaluation.Evaluation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = evaluation.Evaluation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_evaluation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_evaluation_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_evaluation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_evaluation_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_get_evaluation" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_get_evaluation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.GetEvaluationRequest.pb( + document_processor_service.GetEvaluationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = evaluation.Evaluation.to_json( + evaluation.Evaluation() + ) + + request = document_processor_service.GetEvaluationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = evaluation.Evaluation() + + client.get_evaluation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_evaluation_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.GetEvaluationRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4/evaluations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_evaluation(request) + + +def test_get_evaluation_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = evaluation.Evaluation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4/evaluations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = evaluation.Evaluation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_evaluation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{name=projects/*/locations/*/processors/*/processorVersions/*/evaluations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_evaluation_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_evaluation( + document_processor_service.GetEvaluationRequest(), + name="name_value", + ) + + +def test_get_evaluation_rest_error(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_processor_service.ListEvaluationsRequest, + dict, + ], +) +def test_list_evaluations_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_processor_service.ListEvaluationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListEvaluationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_evaluations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEvaluationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_evaluations_rest_required_fields( + request_type=document_processor_service.ListEvaluationsRequest, +): + transport_class = transports.DocumentProcessorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_evaluations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_evaluations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListEvaluationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_processor_service.ListEvaluationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_evaluations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_evaluations_rest_unset_required_fields(): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_evaluations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_evaluations_rest_interceptors(null_interceptor): + transport = transports.DocumentProcessorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentProcessorServiceRestInterceptor(), + ) + client = DocumentProcessorServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "post_list_evaluations" + ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, "pre_list_evaluations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_processor_service.ListEvaluationsRequest.pb( + document_processor_service.ListEvaluationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + document_processor_service.ListEvaluationsResponse.to_json( + document_processor_service.ListEvaluationsResponse() + ) + ) + + request = document_processor_service.ListEvaluationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_processor_service.ListEvaluationsResponse() + + client.list_evaluations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_evaluations_rest_bad_request( + transport: str = "rest", + request_type=document_processor_service.ListEvaluationsRequest, +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_evaluations(request) + + +def test_list_evaluations_rest_flattened(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_processor_service.ListEvaluationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_processor_service.ListEvaluationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_evaluations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{parent=projects/*/locations/*/processors/*/processorVersions/*}/evaluations" + % client.transport._host, + args[1], + ) + + +def test_list_evaluations_rest_flattened_error(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_evaluations( + document_processor_service.ListEvaluationsRequest(), + parent="parent_value", + ) + + +def test_list_evaluations_rest_pager(transport: str = "rest"): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_processor_service.ListEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + next_page_token="abc", + ), + document_processor_service.ListEvaluationsResponse( + evaluations=[], + next_page_token="def", + ), + document_processor_service.ListEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + ], + next_page_token="ghi", + ), + document_processor_service.ListEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_processor_service.ListEvaluationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/processors/sample3/processorVersions/sample4" + } + + pager = client.list_evaluations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, evaluation.Evaluation) for i in results) + + pages = list(client.list_evaluations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DocumentProcessorServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DocumentProcessorServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DocumentProcessorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DocumentProcessorServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DocumentProcessorServiceGrpcTransport, + transports.DocumentProcessorServiceGrpcAsyncIOTransport, + transports.DocumentProcessorServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DocumentProcessorServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DocumentProcessorServiceGrpcTransport, + ) + + +def test_document_processor_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DocumentProcessorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_document_processor_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.documentai_v1beta3.services.document_processor_service.transports.DocumentProcessorServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DocumentProcessorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "process_document", + "batch_process_documents", + "fetch_processor_types", + "list_processor_types", + "get_processor_type", "list_processors", "get_processor", "train_processor_version", @@ -6862,6 +13137,7 @@ def test_document_processor_service_transport_auth_adc(transport_class): [ transports.DocumentProcessorServiceGrpcTransport, transports.DocumentProcessorServiceGrpcAsyncIOTransport, + transports.DocumentProcessorServiceRestTransport, ], ) def test_document_processor_service_transport_auth_gdch_credentials(transport_class): @@ -6963,11 +13239,40 @@ def test_document_processor_service_grpc_transport_client_cert_source_for_mtls( ) +def test_document_processor_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DocumentProcessorServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_document_processor_service_rest_lro_client(): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_processor_service_host_no_port(transport_name): @@ -6978,7 +13283,11 @@ def test_document_processor_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("documentai.googleapis.com:443") + assert client.transport._host == ( + "documentai.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com" + ) @pytest.mark.parametrize( @@ -6986,6 +13295,7 @@ def test_document_processor_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_document_processor_service_host_with_port(transport_name): @@ -6996,7 +13306,96 @@ def test_document_processor_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("documentai.googleapis.com:8000") + assert client.transport._host == ( + "documentai.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://documentai.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_document_processor_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DocumentProcessorServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DocumentProcessorServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.process_document._session + session2 = client2.transport.process_document._session + assert session1 != session2 + session1 = client1.transport.batch_process_documents._session + session2 = client2.transport.batch_process_documents._session + assert session1 != session2 + session1 = 
client1.transport.fetch_processor_types._session + session2 = client2.transport.fetch_processor_types._session + assert session1 != session2 + session1 = client1.transport.list_processor_types._session + session2 = client2.transport.list_processor_types._session + assert session1 != session2 + session1 = client1.transport.get_processor_type._session + session2 = client2.transport.get_processor_type._session + assert session1 != session2 + session1 = client1.transport.list_processors._session + session2 = client2.transport.list_processors._session + assert session1 != session2 + session1 = client1.transport.get_processor._session + session2 = client2.transport.get_processor._session + assert session1 != session2 + session1 = client1.transport.train_processor_version._session + session2 = client2.transport.train_processor_version._session + assert session1 != session2 + session1 = client1.transport.get_processor_version._session + session2 = client2.transport.get_processor_version._session + assert session1 != session2 + session1 = client1.transport.list_processor_versions._session + session2 = client2.transport.list_processor_versions._session + assert session1 != session2 + session1 = client1.transport.delete_processor_version._session + session2 = client2.transport.delete_processor_version._session + assert session1 != session2 + session1 = client1.transport.deploy_processor_version._session + session2 = client2.transport.deploy_processor_version._session + assert session1 != session2 + session1 = client1.transport.undeploy_processor_version._session + session2 = client2.transport.undeploy_processor_version._session + assert session1 != session2 + session1 = client1.transport.create_processor._session + session2 = client2.transport.create_processor._session + assert session1 != session2 + session1 = client1.transport.delete_processor._session + session2 = client2.transport.delete_processor._session + assert session1 != session2 + session1 = client1.transport.enable_processor._session + session2 = client2.transport.enable_processor._session + assert session1 != session2 + session1 = client1.transport.disable_processor._session + session2 = client2.transport.disable_processor._session + assert session1 != session2 + session1 = client1.transport.set_default_processor_version._session + session2 = client2.transport.set_default_processor_version._session + assert session1 != session2 + session1 = client1.transport.review_document._session + session2 = client2.transport.review_document._session + assert session1 != session2 + session1 = client1.transport.evaluate_processor_version._session + session2 = client2.transport.evaluate_processor_version._session + assert session1 != session2 + session1 = client1.transport.get_evaluation._session + session2 = client2.transport.get_evaluation._session + assert session1 != session2 + session1 = client1.transport.list_evaluations._session + session2 = client2.transport.list_evaluations._session + assert session1 != session2 def test_document_processor_service_grpc_transport_channel(): @@ -7446,6 +13845,294 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DocumentProcessorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_cancel_operation(transport: str = "grpc"): client = DocumentProcessorServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8167,6 +14854,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -8184,6 +14872,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 04f7e2f6cbe4c3a45271b8ff6f6fd1eb38ad14cc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 18:15:36 -0600 Subject: [PATCH 4/4] chore(main): release 2.12.0 (#460) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/documentai/gapic_version.py | 2 +- google/cloud/documentai_v1/gapic_version.py | 2 +- google/cloud/documentai_v1beta2/gapic_version.py | 2 +- google/cloud/documentai_v1beta3/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.documentai.v1.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta2.json | 2 +- .../snippet_metadata_google.cloud.documentai.v1beta3.json | 2 +- 9 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 7d90fceb..ff587c6e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.11.0" + ".": "2.12.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 631dc04f..38730c4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.12.0](https://github.com/googleapis/python-documentai/compare/v2.11.0...v2.12.0) (2023-02-08) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#459](https://github.com/googleapis/python-documentai/issues/459)) ([02d06d6](https://github.com/googleapis/python-documentai/commit/02d06d663a99062a98b6dc0b699ffdec38e2e5de)) + ## [2.11.0](https://github.com/googleapis/python-documentai/compare/v2.10.0...v2.11.0) (2023-02-07) diff --git a/google/cloud/documentai/gapic_version.py b/google/cloud/documentai/gapic_version.py index bb74f811..16ae0e95 100644 --- a/google/cloud/documentai/gapic_version.py +++ b/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} diff --git a/google/cloud/documentai_v1/gapic_version.py b/google/cloud/documentai_v1/gapic_version.py index bb74f811..16ae0e95 100644 --- a/google/cloud/documentai_v1/gapic_version.py +++ b/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.11.0" # {x-release-please-version} +__version__ = "2.12.0" # {x-release-please-version} diff --git a/google/cloud/documentai_v1beta2/gapic_version.py b/google/cloud/documentai_v1beta2/gapic_version.py index bb74f811..16ae0e95 100644 --- a/google/cloud/documentai_v1beta2/gapic_version.py +++ b/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
#
-__version__ = "2.11.0" # {x-release-please-version}
+__version__ = "2.12.0" # {x-release-please-version}
diff --git a/google/cloud/documentai_v1beta3/gapic_version.py b/google/cloud/documentai_v1beta3/gapic_version.py
index bb74f811..16ae0e95 100644
--- a/google/cloud/documentai_v1beta3/gapic_version.py
+++ b/google/cloud/documentai_v1beta3/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "2.11.0" # {x-release-please-version}
+__version__ = "2.12.0" # {x-release-please-version}
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json
index 0df77e65..fe28a6f0 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json
@@ -8,7 +8,7 @@
 ],
 "language": "PYTHON",
 "name": "google-cloud-documentai",
- "version": "0.1.0"
+ "version": "2.12.0"
 },
 "snippets": [
 {
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json
index ef56bd7e..bd0b6d31 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json
@@ -8,7 +8,7 @@
 ],
 "language": "PYTHON",
 "name": "google-cloud-documentai",
- "version": "0.1.0"
+ "version": "2.12.0"
 },
 "snippets": [
 {
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json
index f02752f1..0fc04ebf 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json
@@ -8,7 +8,7 @@
 ],
 "language": "PYTHON",
 "name": "google-cloud-documentai",
- "version": "0.1.0"
+ "version": "2.12.0"
 },
 "snippets": [
 {
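For readers following the patch series, the feature released here (per the CHANGELOG entry above) is the newly enabled "rest" transport. As a minimal, illustrative sketch — not part of the patch itself, and assuming google-cloud-documentai 2.12.0 or later is installed — the transport can be selected the same way the tests in this patch construct their clients:

# Usage sketch (illustrative only, not taken from the patch): construct the
# v1beta3 client over REST instead of the default gRPC transport. Anonymous
# credentials mirror the tests above and only allow client construction;
# real API calls require genuine credentials and an existing processor.
from google.auth import credentials as ga_credentials
from google.cloud import documentai_v1beta3

client = documentai_v1beta3.DocumentProcessorServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport="rest",
)

# The chosen transport is reflected on the client, as the transport-kind tests assert.
assert client.transport.kind == "rest"

As the host and session tests above suggest, the REST transport sends requests to https://documentai.googleapis.com over an authorized requests Session, which is why these tests patch Session.request rather than a gRPC channel.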
